[ 530.477378] env[69328]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=69328) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 530.477740] env[69328]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=69328) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 530.477866] env[69328]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=69328) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 530.478154] env[69328]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 530.578370] env[69328]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=69328) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:349}}
[ 530.588014] env[69328]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=69328) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:372}}
[ 530.630538] env[69328]: INFO oslo_service.periodic_task [-] Skipping periodic task _heal_instance_info_cache because its interval is negative
[ 531.188784] env[69328]: INFO nova.virt.driver [None req-ee8b8226-e3e6-4ae2-a951-d0b9b4f22178 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 531.259504] env[69328]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 531.259676] env[69328]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 531.259778] env[69328]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=69328) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 534.185382] env[69328]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-d93d841f-933b-4ef1-84eb-c3ac7b9f2e31 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 534.201944] env[69328]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=69328) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 534.202117] env[69328]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-6d204f0c-c7a4-47cd-ae5a-a69574ac3535 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 534.238067] env[69328]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 4bed5.
[ 534.238209] env[69328]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 2.979s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 534.238831] env[69328]: INFO nova.virt.vmwareapi.driver [None req-ee8b8226-e3e6-4ae2-a951-d0b9b4f22178 None None] VMware vCenter version: 7.0.3
[ 534.242357] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d473c0ba-a413-4aaa-8bf5-c993000c8595 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 534.259802] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a136b51-bd93-4cea-9902-5dd76ea35100 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 534.265859] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-529db919-68c4-4682-af92-badf6033e424 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 534.272712] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d63ff07d-cfcb-4be6-8cd3-fe09effe9162 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 534.285789] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84d5e057-1aaf-454e-836c-3e1ceb07ca75 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 534.291902] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b25a3e22-9e84-4d80-9c1e-dc9a68e972b6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 534.322567] env[69328]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-f18a8791-6cbc-49d2-9b28-a83c453eb41b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 534.327853] env[69328]: DEBUG nova.virt.vmwareapi.driver [None req-ee8b8226-e3e6-4ae2-a951-d0b9b4f22178 None None] Extension org.openstack.compute already exists. {{(pid=69328) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:228}}
[ 534.330573] env[69328]: INFO nova.compute.provider_config [None req-ee8b8226-e3e6-4ae2-a951-d0b9b4f22178 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 534.834045] env[69328]: DEBUG nova.context [None req-ee8b8226-e3e6-4ae2-a951-d0b9b4f22178 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),cb9a8651-3d2e-4d05-a422-aa364ebe7ed6(cell1) {{(pid=69328) load_cells /opt/stack/nova/nova/context.py:464}}
[ 534.836168] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 534.836469] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 534.837189] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 534.838166] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] Acquiring lock "cb9a8651-3d2e-4d05-a422-aa364ebe7ed6" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 534.838373] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] Lock "cb9a8651-3d2e-4d05-a422-aa364ebe7ed6" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 534.839571] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] Lock "cb9a8651-3d2e-4d05-a422-aa364ebe7ed6" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 534.860019] env[69328]: INFO dbcounter [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] Registered counter for database nova_cell0
[ 534.868329] env[69328]: INFO dbcounter [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] Registered counter for database nova_cell1
[ 535.309855] env[69328]: DEBUG oslo_db.sqlalchemy.engines [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=69328) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 535.310308] env[69328]: DEBUG oslo_db.sqlalchemy.engines [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=69328) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 535.315273] env[69328]: ERROR nova.db.main.api [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 535.315273] env[69328]:     result = function(*args, **kwargs)
[ 535.315273] env[69328]:   File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 535.315273] env[69328]:     return func(*args, **kwargs)
[ 535.315273] env[69328]:   File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 535.315273] env[69328]:     result = fn(*args, **kwargs)
[ 535.315273] env[69328]:   File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 535.315273] env[69328]:     return f(*args, **kwargs)
[ 535.315273] env[69328]:   File "/opt/stack/nova/nova/objects/service.py", line 557, in _db_service_get_minimum_version
[ 535.315273] env[69328]:     return db.service_get_minimum_version(context, binaries)
[ 535.315273] env[69328]:   File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 535.315273] env[69328]:     _check_db_access()
[ 535.315273] env[69328]:   File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 535.315273] env[69328]:     stacktrace = ''.join(traceback.format_stack())
[ 535.315273] env[69328]:
[ 535.316097] env[69328]: ERROR nova.db.main.api [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 535.316097] env[69328]:     result = function(*args, **kwargs)
[ 535.316097] env[69328]:   File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 535.316097] env[69328]:     return func(*args, **kwargs)
[ 535.316097] env[69328]:   File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 535.316097] env[69328]:     result = fn(*args, **kwargs)
[ 535.316097] env[69328]:   File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 535.316097] env[69328]:     return f(*args, **kwargs)
[ 535.316097] env[69328]:   File "/opt/stack/nova/nova/objects/service.py", line 557, in _db_service_get_minimum_version
[ 535.316097] env[69328]:     return db.service_get_minimum_version(context, binaries)
[ 535.316097] env[69328]:   File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 535.316097] env[69328]:     _check_db_access()
[ 535.316097] env[69328]:   File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 535.316097] env[69328]:     stacktrace = ''.join(traceback.format_stack())
[ 535.316097] env[69328]:
[ 535.316561] env[69328]: WARNING nova.objects.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] Failed to get minimum service version for cell cb9a8651-3d2e-4d05-a422-aa364ebe7ed6
[ 535.316684] env[69328]: WARNING nova.objects.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 535.317146] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] Acquiring lock "singleton_lock" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 535.317307] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] Acquired lock "singleton_lock" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}}
[ 535.317551] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] Releasing lock "singleton_lock" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}}
[ 535.317877] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] Full set of CONF: {{(pid=69328) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/service.py:357}}
[ 535.318038] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] ******************************************************************************** {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}}
[ 535.318170] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] Configuration options gathered from: {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}}
[ 535.318307] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}}
[ 535.318533] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}}
[ 535.318663] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] ================================================================================ {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}}
[ 535.318871] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] allow_resize_to_same_host = True {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.319087] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] arq_binding_timeout = 300 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.319229] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] backdoor_port = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.319357] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] backdoor_socket = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.319527] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] block_device_allocate_retries = 60 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.319686] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] block_device_allocate_retries_interval = 3 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.319857] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cert = self.pem {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.320036] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.320213] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] compute_monitors = [] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.320384] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] config_dir = [] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.320785] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] config_drive_format = iso9660 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.320931] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.321123] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] config_source = [] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.321299] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] console_host = devstack {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.321467] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] control_exchange = nova {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.321629] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cpu_allocation_ratio = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.321790] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] daemon = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.321957] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] debug = True {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.322127] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] default_access_ip_network_name = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.322293] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] default_availability_zone = nova {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.322450] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] default_ephemeral_format = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.322608] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] default_green_pool_size = 1000 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.322844] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.323010] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] default_schedule_zone = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.323172] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] disk_allocation_ratio = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.323332] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] enable_new_services = True {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.323509] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] enabled_apis = ['osapi_compute'] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.323671] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] enabled_ssl_apis = [] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.323830] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] flat_injected = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.323986] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] force_config_drive = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.324159] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] force_raw_images = True {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.324330] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] graceful_shutdown_timeout = 5 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.324490] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] heal_instance_info_cache_interval = -1 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.324714] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] host = cpu-1 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.324892] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] initial_cpu_allocation_ratio = 4.0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.325066] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] initial_disk_allocation_ratio = 1.0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.325232] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] initial_ram_allocation_ratio = 1.0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.325452] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.325618] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] instance_build_timeout = 0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.325778] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] instance_delete_interval = 300 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.325944] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] instance_format = [instance: %(uuid)s] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.326123] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] instance_name_template = instance-%08x {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.326288] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] instance_usage_audit = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.326481] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] instance_usage_audit_period = month {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.326665] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.326831] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] instances_path = /opt/stack/data/nova/instances {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.326996] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] internal_service_availability_zone = internal {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.327167] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] key = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.327330] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] live_migration_retry_count = 30 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.327500] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] log_color = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.327666] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] log_config_append = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.327831] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.327989] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] log_dir = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.328159] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] log_file = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.328289] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] log_options = True {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.328477] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] log_rotate_interval = 1 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.328679] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] log_rotate_interval_type = days {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.328853] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] log_rotation_type = none {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.328985] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.329127] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.329298] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.329464] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.329594] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.329755] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] long_rpc_timeout = 1800 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.329911] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] max_concurrent_builds = 10 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.330089] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] max_concurrent_live_migrations = 1 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.330249] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] max_concurrent_snapshots = 5 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.330403] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] max_local_block_devices = 3 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.330558] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] max_logfile_count = 30 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.330747] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] max_logfile_size_mb = 200 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.330941] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] maximum_instance_delete_attempts = 5 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.331129] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] metadata_listen = 0.0.0.0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.331299] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] metadata_listen_port = 8775 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.331466] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] metadata_workers = 2 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.331628] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] migrate_max_retries = -1 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.331794] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] mkisofs_cmd = genisoimage {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.331996] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] my_block_storage_ip = 10.180.1.21 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.332140] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] my_ip = 10.180.1.21 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.332344] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] my_shared_fs_storage_ip = 10.180.1.21 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.332505] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] network_allocate_retries = 0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.332681] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.332846] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] osapi_compute_listen = 0.0.0.0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.333021] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] osapi_compute_listen_port = 8774 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.333189] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] osapi_compute_unique_server_name_scope = {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.333356] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] osapi_compute_workers = 2 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.333513] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] password_length = 12 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.333671] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] periodic_enable = True {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.333826] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] periodic_fuzzy_delay = 60 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.333992] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] pointer_model = usbtablet {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.334170] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] preallocate_images = none {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.334328] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] publish_errors = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.334455] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] pybasedir = /opt/stack/nova {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.334609] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] ram_allocation_ratio = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.334762] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] rate_limit_burst = 0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.334924] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] rate_limit_except_level = CRITICAL {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.335089] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] rate_limit_interval = 0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.335248] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] reboot_timeout = 0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.335403] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] reclaim_instance_interval = 0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.335555] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] record = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.335720] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] reimage_timeout_per_gb = 60 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.335882] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] report_interval = 120 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.336048] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] rescue_timeout = 0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.336207] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] reserved_host_cpus = 0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.336362] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] reserved_host_disk_mb = 0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.336544] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] reserved_host_memory_mb = 512 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.336712] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] reserved_huge_pages = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.336870] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] resize_confirm_window = 0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.337038] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] resize_fs_using_block_device = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.337200] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] resume_guests_state_on_host_boot = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.337366] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.337522] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] rpc_response_timeout = 60 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.337680] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] run_external_periodic_tasks = True {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.337844] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] running_deleted_instance_action = reap {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.337997] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] running_deleted_instance_poll_interval = 1800 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.338167] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] running_deleted_instance_timeout = 0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.338343] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] scheduler_instance_sync_interval = 120 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.338525] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] service_down_time = 720 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.338695] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] servicegroup_driver = db {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.338847] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] shell_completion = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.339009] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] shelved_offload_time = 0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.339173] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] shelved_poll_interval = 3600 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.339338] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] shutdown_timeout = 0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.339499] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] source_is_ipv6 = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.339652] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] ssl_only = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.339895] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.340072] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] sync_power_state_interval = 600 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.340236] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] sync_power_state_pool_size = 1000 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.340401] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] syslog_log_facility = LOG_USER {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.340589] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] tempdir = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.340782] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] timeout_nbd = 10 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.340963] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] transport_url = **** {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.341140] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] update_resources_interval = 0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.341301] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] use_cow_images = True {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.341462] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] use_journal = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.341618] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] use_json = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.341775] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] use_rootwrap_daemon = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.341930] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] use_stderr = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.342098] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] use_syslog = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.342253] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vcpu_pin_set = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.342420] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vif_plugging_is_fatal = True {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.342592] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vif_plugging_timeout = 300 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.342747] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] virt_mkfs = [] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.342934] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] volume_usage_poll_interval = 0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.343114] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] watch_log_file = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.343283] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] web = /usr/share/spice-html5 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}}
[ 535.343467] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 535.343641] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 535.343793] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] os_brick.wait_mpath_device_interval = 1 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 535.343959] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_concurrency.disable_process_locking = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 535.344259] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 535.344441] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 535.344609] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 535.344778] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_metrics.metrics_process_name = {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 535.344947] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 535.345129] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 535.345321] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api.auth_strategy = keystone {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 535.345486] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api.compute_link_prefix = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 535.345663] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 535.345835] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api.dhcp_domain = novalocal {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 535.346010] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api.enable_instance_password = True {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 535.346181] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api.glance_link_prefix = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 535.346343] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
[ 535.346549] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api.instance_list_cells_batch_strategy = distributed {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}}
req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api.instance_list_cells_batch_strategy = distributed {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.346728] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api.instance_list_per_project_cells = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.346890] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api.list_records_by_skipping_down_cells = True {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.347063] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api.local_metadata_per_cell = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.347235] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api.max_limit = 1000 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.347402] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api.metadata_cache_expiration = 15 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.347579] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api.neutron_default_tenant_id = default {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.347748] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api.response_validation = warn {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.347918] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api.use_neutron_default_nets = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.348094] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.348257] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api.vendordata_dynamic_failure_fatal = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.348450] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.348637] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api.vendordata_dynamic_ssl_certfile = {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.348811] env[69328]: DEBUG oslo_service.backend.eventlet.service [None 
req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api.vendordata_dynamic_targets = [] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.348976] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api.vendordata_jsonfile_path = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.349177] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api.vendordata_providers = ['StaticJSON'] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.349374] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.backend = dogpile.cache.memcached {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.349600] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.backend_argument = **** {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.349784] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.backend_expiration_time = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.349959] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.config_prefix = cache.oslo {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.350152] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.dead_timeout = 60.0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.350318] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.debug_cache_backend = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.350481] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.enable_retry_client = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.350648] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.enable_socket_keepalive = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.350844] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.enabled = True {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.351029] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.enforce_fips_mode = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.351201] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.expiration_time = 600 
{{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.351364] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.hashclient_retry_attempts = 2 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.351531] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.hashclient_retry_delay = 1.0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.351697] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.memcache_dead_retry = 300 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.351855] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.memcache_password = **** {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.352026] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.352194] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.352356] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.memcache_pool_maxsize = 10 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.352517] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.memcache_pool_unused_timeout = 60 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.352721] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.memcache_sasl_enabled = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.352911] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.memcache_servers = ['localhost:11211'] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.353094] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.memcache_socket_timeout = 1.0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.353257] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.memcache_username = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.353422] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.proxies = [] {{(pid=69328) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.353589] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.redis_db = 0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.353747] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.redis_password = **** {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.353915] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.redis_sentinel_service_name = mymaster {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.354102] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.354273] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.redis_server = localhost:6379 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.354439] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.redis_socket_timeout = 1.0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.354597] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.redis_username = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.354758] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.retry_attempts = 2 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.354951] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.retry_delay = 0.0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.355141] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.socket_keepalive_count = 1 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.355305] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.socket_keepalive_idle = 1 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.355465] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.socket_keepalive_interval = 1 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.355624] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.tls_allowed_ciphers = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.355781] env[69328]: DEBUG 
oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.tls_cafile = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.355936] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.tls_certfile = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.356109] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.tls_enabled = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.356311] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cache.tls_keyfile = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.356509] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cinder.auth_section = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.356778] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cinder.auth_type = password {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.356974] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cinder.cafile = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.357174] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cinder.catalog_info = volumev3::publicURL {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.357338] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cinder.certfile = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.357502] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cinder.collect_timing = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.357667] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cinder.cross_az_attach = True {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.357829] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cinder.debug = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.357985] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cinder.endpoint_template = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.358161] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cinder.http_retries = 3 {{(pid=69328) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.358323] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cinder.insecure = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.358512] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cinder.keyfile = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.358691] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cinder.os_region_name = RegionOne {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.358857] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cinder.split_loggers = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.359024] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cinder.timeout = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.359198] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.359357] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] compute.cpu_dedicated_set = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.359519] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] compute.cpu_shared_set = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.359679] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] compute.image_type_exclude_list = [] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.359839] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] compute.live_migration_wait_for_vif_plug = True {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.359999] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] compute.max_concurrent_disk_ops = 0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.360171] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] compute.max_disk_devices_to_attach = -1 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.360330] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=69328) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.360498] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.360661] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] compute.resource_provider_association_refresh = 300 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.360820] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.360979] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] compute.shutdown_retry_interval = 10 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.361191] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.361376] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] conductor.workers = 2 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.361553] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] console.allowed_origins = [] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.361713] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] console.ssl_ciphers = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.361880] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] console.ssl_minimum_version = default {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.362060] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] consoleauth.enforce_session_timeout = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.362230] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] consoleauth.token_ttl = 600 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.362402] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cyborg.cafile = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.362561] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cyborg.certfile = None {{(pid=69328) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.362725] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cyborg.collect_timing = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.362884] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cyborg.connect_retries = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.363054] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cyborg.connect_retry_delay = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.363216] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cyborg.endpoint_override = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.363379] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cyborg.insecure = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.363534] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cyborg.keyfile = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.363695] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cyborg.max_version = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.363851] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cyborg.min_version = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.364013] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cyborg.region_name = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.364176] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cyborg.retriable_status_codes = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.364331] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cyborg.service_name = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.364498] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cyborg.service_type = accelerator {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.364665] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cyborg.split_loggers = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.364819] env[69328]: DEBUG oslo_service.backend.eventlet.service 
[None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cyborg.status_code_retries = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.364975] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cyborg.status_code_retry_delay = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.365145] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cyborg.timeout = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.365325] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.365489] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] cyborg.version = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.365663] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] database.asyncio_connection = **** {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.365824] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] database.asyncio_slave_connection = **** {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.365995] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] database.backend = sqlalchemy {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.366181] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] database.connection = **** {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.366349] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] database.connection_debug = 0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.366543] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] database.connection_parameters = {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.366716] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] database.connection_recycle_time = 3600 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.366881] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] database.connection_trace = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.367053] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] database.db_inc_retry_interval = 
True {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.367225] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] database.db_max_retries = 20 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.367386] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] database.db_max_retry_interval = 10 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.367545] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] database.db_retry_interval = 1 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.367706] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] database.max_overflow = 50 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.367866] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] database.max_pool_size = 5 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.368033] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] database.max_retries = 10 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.368208] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] database.mysql_sql_mode = TRADITIONAL {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.368367] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] database.mysql_wsrep_sync_wait = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.368549] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] database.pool_timeout = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.368714] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] database.retry_interval = 10 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.368872] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] database.slave_connection = **** {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.369044] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] database.sqlite_synchronous = True {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.369210] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] database.use_db_reconnect = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
535.369380] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api_database.asyncio_connection = **** {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.369535] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api_database.asyncio_slave_connection = **** {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.369705] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api_database.backend = sqlalchemy {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.369875] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api_database.connection = **** {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.370058] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api_database.connection_debug = 0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.370234] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api_database.connection_parameters = {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.370396] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api_database.connection_recycle_time = 3600 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.370556] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api_database.connection_trace = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.370721] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api_database.db_inc_retry_interval = True {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.370880] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api_database.db_max_retries = 20 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.371049] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api_database.db_max_retry_interval = 10 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.371214] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api_database.db_retry_interval = 1 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.371374] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api_database.max_overflow = 50 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.371535] env[69328]: DEBUG 
oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api_database.max_pool_size = 5 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.371696] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api_database.max_retries = 10 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.371866] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.372032] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api_database.mysql_wsrep_sync_wait = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.372193] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api_database.pool_timeout = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.372351] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api_database.retry_interval = 10 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.372509] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api_database.slave_connection = **** {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.372673] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] api_database.sqlite_synchronous = True {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.372847] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] devices.enabled_mdev_types = [] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.373031] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.373208] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] ephemeral_storage_encryption.default_format = luks {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.373370] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] ephemeral_storage_encryption.enabled = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.373532] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] ephemeral_storage_encryption.key_size = 512 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.373707] env[69328]: DEBUG 
oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] glance.api_servers = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.373871] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] glance.cafile = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.374039] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] glance.certfile = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.374205] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] glance.collect_timing = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.374360] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] glance.connect_retries = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.374515] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] glance.connect_retry_delay = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.374676] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] glance.debug = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.374838] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] glance.default_trusted_certificate_ids = [] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.374998] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] glance.enable_certificate_validation = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.375172] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] glance.enable_rbd_download = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.375329] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] glance.endpoint_override = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.375502] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] glance.insecure = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.375650] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] glance.keyfile = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.375808] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] glance.max_version = None {{(pid=69328) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.375961] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] glance.min_version = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.376131] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] glance.num_retries = 3 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.376298] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] glance.rbd_ceph_conf = {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.376475] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] glance.rbd_connect_timeout = 5 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.376661] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] glance.rbd_pool = {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.376812] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] glance.rbd_user = {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.376975] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] glance.region_name = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.377147] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] glance.retriable_status_codes = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.377303] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] glance.service_name = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.377472] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] glance.service_type = image {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.377635] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] glance.split_loggers = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.377789] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] glance.status_code_retries = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.377945] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] glance.status_code_retry_delay = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.378110] env[69328]: DEBUG oslo_service.backend.eventlet.service [None 
req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] glance.timeout = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.378290] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.378470] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] glance.verify_glance_signatures = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.378635] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] glance.version = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.378801] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] guestfs.debug = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.378967] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] manila.auth_section = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.379143] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] manila.auth_type = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.379303] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] manila.cafile = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.379459] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] manila.certfile = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.379622] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] manila.collect_timing = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.379778] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] manila.connect_retries = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.379937] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] manila.connect_retry_delay = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.380105] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] manila.endpoint_override = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.380269] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] manila.insecure = False {{(pid=69328) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.380426] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] manila.keyfile = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.380587] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] manila.max_version = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.380743] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] manila.min_version = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.380899] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] manila.region_name = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.381067] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] manila.retriable_status_codes = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.381226] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] manila.service_name = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.381394] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] manila.service_type = shared-file-system {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.381557] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] manila.share_apply_policy_timeout = 10 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.381719] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] manila.split_loggers = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.381876] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] manila.status_code_retries = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.382043] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] manila.status_code_retry_delay = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.382206] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] manila.timeout = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.382387] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.382548] env[69328]: DEBUG 
oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] manila.version = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.382717] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] mks.enabled = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.383086] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.383284] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] image_cache.manager_interval = 2400 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.383454] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] image_cache.precache_concurrency = 1 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.383627] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] image_cache.remove_unused_base_images = True {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.383797] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.383966] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.384158] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] image_cache.subdirectory_name = _base {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.384335] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] ironic.api_max_retries = 60 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.384499] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] ironic.api_retry_interval = 2 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.384660] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] ironic.auth_section = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.384820] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] ironic.auth_type = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.384979] env[69328]: DEBUG oslo_service.backend.eventlet.service [None 
req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] ironic.cafile = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.385151] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] ironic.certfile = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.385317] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] ironic.collect_timing = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.385478] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] ironic.conductor_group = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.385637] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] ironic.connect_retries = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.385795] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] ironic.connect_retry_delay = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.385950] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] ironic.endpoint_override = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.386125] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] ironic.insecure = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.386283] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] ironic.keyfile = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.386460] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] ironic.max_version = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.386632] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] ironic.min_version = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.386798] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] ironic.peer_list = [] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.386954] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] ironic.region_name = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.387128] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] ironic.retriable_status_codes = None {{(pid=69328) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.387291] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] ironic.serial_console_state_timeout = 10 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.387449] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] ironic.service_name = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.387620] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] ironic.service_type = baremetal {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.387776] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] ironic.shard = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.387940] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] ironic.split_loggers = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.388108] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] ironic.status_code_retries = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.388269] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] ironic.status_code_retry_delay = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.388475] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] ironic.timeout = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.388646] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.388817] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] ironic.version = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.389030] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.389215] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] key_manager.fixed_key = **** {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.389401] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=69328) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.389566] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] barbican.barbican_api_version = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.389724] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] barbican.barbican_endpoint = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.389892] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] barbican.barbican_endpoint_type = public {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.390061] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] barbican.barbican_region_name = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.390225] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] barbican.cafile = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.390383] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] barbican.certfile = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.390544] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] barbican.collect_timing = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.390707] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] barbican.insecure = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.390864] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] barbican.keyfile = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.391034] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] barbican.number_of_retries = 60 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.391200] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] barbican.retry_delay = 1 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.391362] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] barbican.send_service_user_token = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.391524] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] barbican.split_loggers = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.391682] env[69328]: DEBUG 
oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] barbican.timeout = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.391843] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] barbican.verify_ssl = True {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.391999] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] barbican.verify_ssl_path = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.392178] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] barbican_service_user.auth_section = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.392342] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] barbican_service_user.auth_type = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.392503] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] barbican_service_user.cafile = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.392661] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] barbican_service_user.certfile = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.392824] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] barbican_service_user.collect_timing = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.392984] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] barbican_service_user.insecure = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.393154] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] barbican_service_user.keyfile = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.393378] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] barbican_service_user.split_loggers = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.393644] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] barbican_service_user.timeout = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.393932] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vault.approle_role_id = **** {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.394150] env[69328]: DEBUG oslo_service.backend.eventlet.service [None 
req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vault.approle_secret_id = **** {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.394337] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vault.kv_mountpoint = secret {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.394504] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vault.kv_path = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.394675] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vault.kv_version = 2 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.394837] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vault.namespace = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.394996] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vault.root_token_id = **** {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.395169] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vault.ssl_ca_crt_file = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.395339] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vault.timeout = 60.0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.395502] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vault.use_ssl = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.395675] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.395847] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] keystone.cafile = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.396013] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] keystone.certfile = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.396183] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] keystone.collect_timing = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.396345] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] keystone.connect_retries = None {{(pid=69328) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.396548] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] keystone.connect_retry_delay = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.396725] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] keystone.endpoint_override = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.396893] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] keystone.insecure = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.397063] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] keystone.keyfile = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.397224] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] keystone.max_version = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.397381] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] keystone.min_version = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.397541] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] keystone.region_name = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.397703] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] keystone.retriable_status_codes = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.397859] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] keystone.service_name = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.398038] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] keystone.service_type = identity {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.398206] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] keystone.split_loggers = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.398364] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] keystone.status_code_retries = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.398550] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] keystone.status_code_retry_delay = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.398715] env[69328]: DEBUG 
oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] keystone.timeout = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.398896] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.399068] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] keystone.version = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.399261] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.ceph_mount_options = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.399684] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.ceph_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.399875] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.connection_uri = {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.400053] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.cpu_mode = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.400226] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.cpu_model_extra_flags = [] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.400396] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.cpu_models = [] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.400568] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.cpu_power_governor_high = performance {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.400753] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.cpu_power_governor_low = powersave {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.400976] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.cpu_power_management = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.401177] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.401349] env[69328]: DEBUG oslo_service.backend.eventlet.service [None 
req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.device_detach_attempts = 8 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.401516] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.device_detach_timeout = 20 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.401686] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.disk_cachemodes = [] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.401846] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.disk_prefix = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.402015] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.enabled_perf_events = [] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.402187] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.file_backed_memory = 0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.402351] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.gid_maps = [] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.402509] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.hw_disk_discard = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.402669] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.hw_machine_type = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.402837] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.images_rbd_ceph_conf = {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.402999] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.403176] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.403343] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.images_rbd_glance_store_name = {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.403511] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.images_rbd_pool = rbd 
{{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.403684] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.images_type = default {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.403843] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.images_volume_group = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.404009] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.inject_key = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.404179] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.inject_partition = -2 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.404340] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.inject_password = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.404503] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.iscsi_iface = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.404665] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.iser_use_multipath = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.404826] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.live_migration_bandwidth = 0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.404988] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.live_migration_completion_timeout = 800 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.405162] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.live_migration_downtime = 500 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.405323] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.live_migration_downtime_delay = 75 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.405484] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.live_migration_downtime_steps = 10 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.405642] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.live_migration_inbound_addr = None {{(pid=69328) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.405803] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.live_migration_permit_auto_converge = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.405961] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.live_migration_permit_post_copy = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.406131] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.live_migration_scheme = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.406304] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.live_migration_timeout_action = abort {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.406487] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.live_migration_tunnelled = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.406659] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.live_migration_uri = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.406823] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.live_migration_with_native_tls = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.406982] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.max_queues = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.407159] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.mem_stats_period_seconds = 10 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.407389] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.407554] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.nfs_mount_options = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.407850] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.408037] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.num_aoe_discover_tries = 3 {{(pid=69328) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.408207] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.num_iser_scan_tries = 5 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.408366] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.num_memory_encrypted_guests = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.408557] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.num_nvme_discover_tries = 5 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.408725] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.num_pcie_ports = 0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.408892] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.num_volume_scan_tries = 5 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.409070] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.pmem_namespaces = [] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.409233] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.quobyte_client_cfg = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.409520] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.409696] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.rbd_connect_timeout = 5 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.409862] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.410036] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.410200] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.rbd_secret_uuid = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.410355] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.rbd_user = None {{(pid=69328) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.410519] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.realtime_scheduler_priority = 1 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.410688] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.remote_filesystem_transport = ssh {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.410868] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.rescue_image_id = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.411038] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.rescue_kernel_id = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.411198] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.rescue_ramdisk_id = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.411369] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.rng_dev_path = /dev/urandom {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.411529] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.rx_queue_size = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.411701] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.smbfs_mount_options = {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.411994] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.412189] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.snapshot_compression = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.412353] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.snapshot_image_format = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.412575] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.412778] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.sparse_logical_volumes = False {{(pid=69328) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.412987] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.swtpm_enabled = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.413182] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.swtpm_group = tss {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.413355] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.swtpm_user = tss {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.413530] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.sysinfo_serial = unique {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.413690] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.tb_cache_size = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.413849] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.tx_queue_size = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.414020] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.uid_maps = [] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.414185] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.use_virtio_for_bridges = True {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.414357] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.virt_type = kvm {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.414524] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.volume_clear = zero {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.414687] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.volume_clear_size = 0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.414849] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.volume_enforce_multipath = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.415021] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.volume_use_multipath = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.415185] env[69328]: DEBUG 
oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.vzstorage_cache_path = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.415356] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.415526] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.vzstorage_mount_group = qemu {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.415693] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.vzstorage_mount_opts = [] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.415863] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.416176] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.416361] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.vzstorage_mount_user = stack {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.416550] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.416731] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] neutron.auth_section = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.416908] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] neutron.auth_type = password {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.417085] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] neutron.cafile = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.417249] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] neutron.certfile = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.417410] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] neutron.collect_timing = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.417570] env[69328]: DEBUG 
oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] neutron.connect_retries = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.417728] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] neutron.connect_retry_delay = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.417899] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] neutron.default_floating_pool = public {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.418067] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] neutron.endpoint_override = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.418233] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] neutron.extension_sync_interval = 600 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.418392] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] neutron.http_retries = 3 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.418584] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] neutron.insecure = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.418750] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] neutron.keyfile = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.418910] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] neutron.max_version = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.419096] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] neutron.metadata_proxy_shared_secret = **** {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.419257] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] neutron.min_version = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.419431] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] neutron.ovs_bridge = br-int {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.419594] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] neutron.physnets = [] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.419762] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] neutron.region_name = RegionOne 
{{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.419922] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] neutron.retriable_status_codes = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.420105] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] neutron.service_metadata_proxy = True {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.420265] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] neutron.service_name = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.420432] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] neutron.service_type = network {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.420597] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] neutron.split_loggers = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.420754] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] neutron.status_code_retries = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.420913] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] neutron.status_code_retry_delay = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.421082] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] neutron.timeout = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.421266] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.421430] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] neutron.version = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.421604] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] notifications.bdms_in_notifications = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.421781] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] notifications.default_level = INFO {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.421946] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] notifications.include_share_mapping = False {{(pid=69328) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.422135] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] notifications.notification_format = unversioned {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.422302] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] notifications.notify_on_state_change = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.422480] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.422656] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] pci.alias = [] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.422824] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] pci.device_spec = [] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.422988] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] pci.report_in_placement = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.423173] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] placement.auth_section = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.423346] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] placement.auth_type = password {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.423516] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] placement.auth_url = http://10.180.1.21/identity {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.423679] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] placement.cafile = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.423837] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] placement.certfile = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.423998] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] placement.collect_timing = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.424170] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] placement.connect_retries = None {{(pid=69328) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.424328] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] placement.connect_retry_delay = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.424484] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] placement.default_domain_id = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.424639] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] placement.default_domain_name = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.424792] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] placement.domain_id = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.424945] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] placement.domain_name = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.425113] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] placement.endpoint_override = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.425276] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] placement.insecure = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.425432] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] placement.keyfile = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.425590] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] placement.max_version = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.425745] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] placement.min_version = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.425912] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] placement.password = **** {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.426081] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] placement.project_domain_id = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.426251] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] placement.project_domain_name = Default {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.426429] env[69328]: DEBUG 
oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] placement.project_id = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.426622] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] placement.project_name = service {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.426794] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] placement.region_name = RegionOne {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.426957] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] placement.retriable_status_codes = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.427132] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] placement.service_name = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.427300] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] placement.service_type = placement {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.427462] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] placement.split_loggers = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.427620] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] placement.status_code_retries = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.427776] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] placement.status_code_retry_delay = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.427931] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] placement.system_scope = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.428097] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] placement.timeout = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.428255] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] placement.trust_id = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.428431] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] placement.user_domain_id = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.428635] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] 
placement.user_domain_name = Default {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.428802] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] placement.user_id = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.428977] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] placement.username = nova {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.429172] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.429333] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] placement.version = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.429544] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] quota.cores = 20 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.429733] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] quota.count_usage_from_placement = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.429889] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.430070] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] quota.injected_file_content_bytes = 10240 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.430242] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] quota.injected_file_path_length = 255 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.430408] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] quota.injected_files = 5 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.430571] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] quota.instances = 10 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.430734] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] quota.key_pairs = 100 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.430895] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] quota.metadata_items = 128 {{(pid=69328) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.431068] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] quota.ram = 51200 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.431234] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] quota.recheck_quota = True {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.431400] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] quota.server_group_members = 10 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.431566] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] quota.server_groups = 10 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.431774] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] quota.unified_limits_resource_list = ['servers'] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.431947] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] quota.unified_limits_resource_strategy = require {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.432135] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.432301] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.432461] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] scheduler.image_metadata_prefilter = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.432623] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.432785] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] scheduler.max_attempts = 3 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.432944] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] scheduler.max_placement_results = 1000 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.433124] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=69328) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.433289] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] scheduler.query_placement_for_image_type_support = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.433450] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.433624] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] scheduler.workers = 2 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.433801] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.433971] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.434161] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.434329] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.434496] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.434656] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.434817] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.435009] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.435182] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] 
filter_scheduler.host_subset_size = 1 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.435346] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.435503] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] filter_scheduler.image_properties_default_architecture = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.435665] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] filter_scheduler.image_props_weight_multiplier = 0.0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.435828] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] filter_scheduler.image_props_weight_setting = [] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.435997] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.436170] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] filter_scheduler.isolated_hosts = [] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.436330] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] filter_scheduler.isolated_images = [] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.436512] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] filter_scheduler.max_instances_per_host = 50 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.436682] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.436844] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.437014] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] filter_scheduler.pci_in_placement = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.437178] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.437337] env[69328]: DEBUG 
oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.437496] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.437659] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.437820] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.437981] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.438155] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] filter_scheduler.track_instance_changes = True {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.438330] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.438522] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] metrics.required = True {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.438693] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] metrics.weight_multiplier = 1.0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.438856] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] metrics.weight_of_unavailable = -10000.0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.439025] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] metrics.weight_setting = [] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.439350] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.439547] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] serial_console.enabled = False {{(pid=69328) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.439731] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] serial_console.port_range = 10000:20000 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.439905] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.440086] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.440260] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] serial_console.serialproxy_port = 6083 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.440428] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] service_user.auth_section = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.440602] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] service_user.auth_type = password {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.440759] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] service_user.cafile = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.440915] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] service_user.certfile = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.441094] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] service_user.collect_timing = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.441258] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] service_user.insecure = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.441414] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] service_user.keyfile = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.441586] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] service_user.send_service_user_token = True {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.441748] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] service_user.split_loggers = False {{(pid=69328) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.441904] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] service_user.timeout = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.442082] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] spice.agent_enabled = True {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.442246] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] spice.enabled = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.442551] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.442755] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] spice.html5proxy_host = 0.0.0.0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.442927] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] spice.html5proxy_port = 6082 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.443098] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] spice.image_compression = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.443258] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] spice.jpeg_compression = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.443416] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] spice.playback_compression = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.443580] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] spice.require_secure = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.443756] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] spice.server_listen = 127.0.0.1 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.443928] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.444217] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] spice.spice_direct_proxy_base_url = http://127.0.0.1:13002/nova {{(pid=69328) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.444388] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] spice.streaming_mode = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.444549] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] spice.zlib_compression = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.444717] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] upgrade_levels.baseapi = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.444890] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] upgrade_levels.compute = auto {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.445061] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] upgrade_levels.conductor = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.445223] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] upgrade_levels.scheduler = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.445391] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vendordata_dynamic_auth.auth_section = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.445555] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vendordata_dynamic_auth.auth_type = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.445719] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vendordata_dynamic_auth.cafile = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.445879] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vendordata_dynamic_auth.certfile = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.446054] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vendordata_dynamic_auth.collect_timing = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.446217] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vendordata_dynamic_auth.insecure = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.446375] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vendordata_dynamic_auth.keyfile = None {{(pid=69328) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.446565] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vendordata_dynamic_auth.split_loggers = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.446729] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vendordata_dynamic_auth.timeout = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.446903] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vmware.api_retry_count = 10 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.447078] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vmware.ca_file = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.447256] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vmware.cache_prefix = devstack-image-cache {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.447425] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vmware.cluster_name = testcl1 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.447593] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vmware.connection_pool_size = 10 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.447751] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vmware.console_delay_seconds = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.447922] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vmware.datastore_regex = ^datastore.* {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.448150] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.448329] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vmware.host_password = **** {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.448539] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vmware.host_port = 443 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.448766] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vmware.host_username = administrator@vsphere.local {{(pid=69328) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.448952] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vmware.insecure = True {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.449153] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vmware.integration_bridge = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.449328] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vmware.maximum_objects = 100 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.449518] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vmware.pbm_default_policy = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.449657] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vmware.pbm_enabled = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.449817] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vmware.pbm_wsdl_location = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.449987] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.450166] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vmware.serial_port_proxy_uri = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.450326] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vmware.serial_port_service_uri = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.450491] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vmware.task_poll_interval = 0.5 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.450752] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vmware.use_linked_clone = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.451044] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vmware.vnc_keymap = en-us {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.451250] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vmware.vnc_port = 5900 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.451425] env[69328]: DEBUG 
oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vmware.vnc_port_total = 10000 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.451616] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vnc.auth_schemes = ['none'] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.451793] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vnc.enabled = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.452102] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.452300] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.452478] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vnc.novncproxy_port = 6080 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.452668] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vnc.server_listen = 127.0.0.1 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.452848] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.453021] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vnc.vencrypt_ca_certs = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.453203] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vnc.vencrypt_client_cert = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.453366] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vnc.vencrypt_client_key = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.453548] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.453716] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] workarounds.disable_deep_image_inspection = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.453878] env[69328]: DEBUG oslo_service.backend.eventlet.service [None 
req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] workarounds.disable_fallback_pcpu_query = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.454064] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] workarounds.disable_group_policy_check_upcall = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.454250] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.454413] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] workarounds.disable_rootwrap = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.454578] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] workarounds.enable_numa_live_migration = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.454738] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.454898] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.455083] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] workarounds.handle_virt_lifecycle_events = True {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.455257] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] workarounds.libvirt_disable_apic = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.455563] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] workarounds.never_download_image_if_on_rbd = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.455764] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.455908] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.456094] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=69328) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.456307] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.456512] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.456678] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.456842] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.457014] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.457213] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.457405] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.457578] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] wsgi.client_socket_timeout = 900 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.457748] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] wsgi.default_pool_size = 1000 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.457914] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] wsgi.keep_alive = True {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.458104] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] wsgi.max_header_line = 16384 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.458279] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] wsgi.secure_proxy_ssl_header = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.458464] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] wsgi.ssl_ca_file = None 
{{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.458638] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] wsgi.ssl_cert_file = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.458801] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] wsgi.ssl_key_file = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.458966] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] wsgi.tcp_keepidle = 600 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.459155] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.459323] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] zvm.ca_file = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.459506] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] zvm.cloud_connector_url = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.459811] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.459989] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] zvm.reachable_timeout = 300 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.460175] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.460353] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.460530] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] profiler.connection_string = messaging:// {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.460695] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] profiler.enabled = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.460863] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] 
profiler.es_doc_type = notification {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.461034] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] profiler.es_scroll_size = 10000 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.461205] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] profiler.es_scroll_time = 2m {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.461364] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] profiler.filter_error_trace = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.461530] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] profiler.hmac_keys = **** {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.461697] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] profiler.sentinel_service_name = mymaster {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.461860] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] profiler.socket_timeout = 0.1 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.462029] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] profiler.trace_requests = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.462206] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] profiler.trace_sqlalchemy = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.462381] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] profiler_jaeger.process_tags = {} {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.462539] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] profiler_jaeger.service_name_prefix = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.462700] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] profiler_otlp.service_name_prefix = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.462860] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] remote_debug.host = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.463023] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] remote_debug.port = None {{(pid=69328) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.463202] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.463365] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.463525] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.463690] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.463853] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.464026] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.464192] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.464353] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.464513] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.464682] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_rabbit.hostname = devstack {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.464841] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.465014] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.465188] env[69328]: DEBUG oslo_service.backend.eventlet.service [None 
req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.465353] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.465513] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_rabbit.kombu_reconnect_splay = 0.0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.465684] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.465843] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.466009] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.466184] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.466349] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.466506] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.466670] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.466832] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.466993] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.467168] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=69328) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.467327] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.467487] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.467649] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.467807] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.467965] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.468140] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_rabbit.ssl = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.468309] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.468509] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.468678] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.468845] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.469017] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_rabbit.ssl_version = {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.469179] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.469363] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None 
None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.469558] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_notifications.retry = -1 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.469744] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.469917] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_messaging_notifications.transport_url = **** {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.470100] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_limit.auth_section = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.470268] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_limit.auth_type = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.470427] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_limit.cafile = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.470586] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_limit.certfile = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.470747] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_limit.collect_timing = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.470905] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_limit.connect_retries = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.471073] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_limit.connect_retry_delay = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.471233] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_limit.endpoint_id = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.471403] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_limit.endpoint_interface = publicURL {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.471561] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_limit.endpoint_override = 
None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.471717] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_limit.endpoint_region_name = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.471873] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_limit.endpoint_service_name = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.472038] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_limit.endpoint_service_type = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.472201] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_limit.insecure = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.472358] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_limit.keyfile = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.472514] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_limit.max_version = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.472671] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_limit.min_version = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.472825] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_limit.region_name = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.472981] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_limit.retriable_status_codes = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.473151] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_limit.service_name = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.473308] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_limit.service_type = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.473467] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_limit.split_loggers = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.473624] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_limit.status_code_retries = None {{(pid=69328) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.473781] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_limit.status_code_retry_delay = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.473936] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_limit.timeout = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.474102] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_limit.valid_interfaces = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.474261] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_limit.version = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.474425] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_reports.file_event_handler = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.474590] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_reports.file_event_handler_interval = 1 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.474746] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] oslo_reports.log_dir = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.474915] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.475089] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vif_plug_linux_bridge_privileged.group = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.475251] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.475422] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.475591] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.475749] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vif_plug_linux_bridge_privileged.user 
= None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.475919] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.476088] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vif_plug_ovs_privileged.group = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.476249] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vif_plug_ovs_privileged.helper_command = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.476415] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.476579] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.476738] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] vif_plug_ovs_privileged.user = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.476910] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] os_vif_linux_bridge.flat_interface = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.477102] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.477279] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.477451] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.477624] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.477794] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.477960] env[69328]: DEBUG oslo_service.backend.eventlet.service [None 
req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.478134] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] os_vif_linux_bridge.vlan_interface = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.478313] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.478512] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] os_vif_ovs.isolate_vif = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.478683] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.478849] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.479027] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.479202] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] os_vif_ovs.ovsdb_interface = native {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.479364] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] os_vif_ovs.per_port_bridge = False {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.479558] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] privsep_osbrick.capabilities = [21] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.479725] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] privsep_osbrick.group = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.479882] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] privsep_osbrick.helper_command = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.480062] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.480228] env[69328]: DEBUG oslo_service.backend.eventlet.service [None 
req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] privsep_osbrick.thread_pool_size = 8 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.480386] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] privsep_osbrick.user = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.480559] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.480717] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] nova_sys_admin.group = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.480871] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] nova_sys_admin.helper_command = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.481049] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.481213] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] nova_sys_admin.thread_pool_size = 8 {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.481368] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] nova_sys_admin.user = None {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 535.481497] env[69328]: DEBUG oslo_service.backend.eventlet.service [None req-b7380ca2-0a4f-44d8-9e68-2a80db0f8b4b None None] ******************************************************************************** {{(pid=69328) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 535.481911] env[69328]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 535.988020] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c66b03fd-92d7-42ca-b52e-634c39a0923a None None] Getting list of instances from cluster (obj){ [ 535.988020] env[69328]: value = "domain-c8" [ 535.988020] env[69328]: _type = "ClusterComputeResource" [ 535.988020] env[69328]: } {{(pid=69328) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 535.988020] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f3deffd-8911-4675-92ac-1c04203c1caa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.995489] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c66b03fd-92d7-42ca-b52e-634c39a0923a None None] Got total of 0 instances {{(pid=69328) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 535.996231] env[69328]: WARNING nova.virt.vmwareapi.driver [None req-c66b03fd-92d7-42ca-b52e-634c39a0923a None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear 
maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 535.996818] env[69328]: INFO nova.virt.node [None req-c66b03fd-92d7-42ca-b52e-634c39a0923a None None] Generated node identity 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e [ 535.997173] env[69328]: INFO nova.virt.node [None req-c66b03fd-92d7-42ca-b52e-634c39a0923a None None] Wrote node identity 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e to /opt/stack/data/n-cpu-1/compute_id [ 536.501297] env[69328]: WARNING nova.compute.manager [None req-c66b03fd-92d7-42ca-b52e-634c39a0923a None None] Compute nodes ['149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 537.506074] env[69328]: INFO nova.compute.manager [None req-c66b03fd-92d7-42ca-b52e-634c39a0923a None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 538.512199] env[69328]: WARNING nova.compute.manager [None req-c66b03fd-92d7-42ca-b52e-634c39a0923a None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 538.512626] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c66b03fd-92d7-42ca-b52e-634c39a0923a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 538.512767] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c66b03fd-92d7-42ca-b52e-634c39a0923a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 538.512931] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c66b03fd-92d7-42ca-b52e-634c39a0923a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 538.513100] env[69328]: DEBUG nova.compute.resource_tracker [None req-c66b03fd-92d7-42ca-b52e-634c39a0923a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69328) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 538.514077] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1db67793-a296-4f1c-93ec-8b5c3feb9b78 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.522500] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f87f7d8f-efeb-449b-bd06-2344dcd3f5d5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.536779] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e114153-e906-46ce-89a4-116f6ab09ce5 {{(pid=69328) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.543300] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a55d6a9c-648f-479e-95c2-498d0ff7e880 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.571467] env[69328]: DEBUG nova.compute.resource_tracker [None req-c66b03fd-92d7-42ca-b52e-634c39a0923a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180963MB free_disk=116GB free_vcpus=48 pci_devices=None {{(pid=69328) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 538.571644] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c66b03fd-92d7-42ca-b52e-634c39a0923a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 538.571813] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c66b03fd-92d7-42ca-b52e-634c39a0923a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 539.074163] env[69328]: WARNING nova.compute.resource_tracker [None req-c66b03fd-92d7-42ca-b52e-634c39a0923a None None] No compute node record for cpu-1:149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e could not be found. [ 539.578524] env[69328]: INFO nova.compute.resource_tracker [None req-c66b03fd-92d7-42ca-b52e-634c39a0923a None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e [ 541.086348] env[69328]: DEBUG nova.compute.resource_tracker [None req-c66b03fd-92d7-42ca-b52e-634c39a0923a None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=69328) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 541.086746] env[69328]: DEBUG nova.compute.resource_tracker [None req-c66b03fd-92d7-42ca-b52e-634c39a0923a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=69328) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 541.236753] env[69328]: INFO nova.scheduler.client.report [None req-c66b03fd-92d7-42ca-b52e-634c39a0923a None None] [req-3b1181eb-33d2-4a63-8aec-5eb96c017b82] Created resource provider record via placement API for resource provider with UUID 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
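The entries above show the resource tracker summarizing the hypervisor view (48 vCPUs, 196590 MB RAM, 200 GB disk) and Placement recording an inventory for the new resource provider. A minimal illustrative sketch follows, not Nova code: the numbers are copied from the inventory payload logged above, and it only shows how the usable capacity per resource class is conventionally derived as (total - reserved) * allocation_ratio.

    # Illustrative sketch only -- not Nova's implementation. It reproduces the
    # inventory payload reported to Placement in the log above and derives the
    # usable capacity per resource class: (total - reserved) * allocation_ratio.
    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
                 "step_size": 1, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                      "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 116,
                    "step_size": 1, "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        usable = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(f"{rc}: usable capacity {usable:g}")
    # VCPU: usable capacity 192
    # MEMORY_MB: usable capacity 196078
    # DISK_GB: usable capacity 400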
[ 541.253276] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c5dc3c5-43b2-415d-a791-31f4f1df6955 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.260854] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f797767-f197-4979-9f52-35d1afbefd87 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.290328] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f31253bc-25b2-43ab-9920-20d5d0e53219 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.297551] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2ad4c61-9ca1-4abb-8d01-1eab98f75c28 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.310513] env[69328]: DEBUG nova.compute.provider_tree [None req-c66b03fd-92d7-42ca-b52e-634c39a0923a None None] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 541.847189] env[69328]: DEBUG nova.scheduler.client.report [None req-c66b03fd-92d7-42ca-b52e-634c39a0923a None None] Updated inventory for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 541.847423] env[69328]: DEBUG nova.compute.provider_tree [None req-c66b03fd-92d7-42ca-b52e-634c39a0923a None None] Updating resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e generation from 0 to 1 during operation: update_inventory {{(pid=69328) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 541.847563] env[69328]: DEBUG nova.compute.provider_tree [None req-c66b03fd-92d7-42ca-b52e-634c39a0923a None None] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 541.898690] env[69328]: DEBUG nova.compute.provider_tree [None req-c66b03fd-92d7-42ca-b52e-634c39a0923a None None] Updating 
resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e generation from 1 to 2 during operation: update_traits {{(pid=69328) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 542.403740] env[69328]: DEBUG nova.compute.resource_tracker [None req-c66b03fd-92d7-42ca-b52e-634c39a0923a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69328) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 542.404098] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c66b03fd-92d7-42ca-b52e-634c39a0923a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.832s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 542.404146] env[69328]: DEBUG nova.service [None req-c66b03fd-92d7-42ca-b52e-634c39a0923a None None] Creating RPC server for service compute {{(pid=69328) start /opt/stack/nova/nova/service.py:186}} [ 542.416906] env[69328]: DEBUG nova.service [None req-c66b03fd-92d7-42ca-b52e-634c39a0923a None None] Join ServiceGroup membership for this service compute {{(pid=69328) start /opt/stack/nova/nova/service.py:203}} [ 542.417058] env[69328]: DEBUG nova.servicegroup.drivers.db [None req-c66b03fd-92d7-42ca-b52e-634c39a0923a None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=69328) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 574.420727] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._sync_power_states {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 574.925057] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Getting list of instances from cluster (obj){ [ 574.925057] env[69328]: value = "domain-c8" [ 574.925057] env[69328]: _type = "ClusterComputeResource" [ 574.925057] env[69328]: } {{(pid=69328) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 574.926529] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4312369-d287-4495-ad3e-41ca80c596b5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.936263] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Got total of 0 instances {{(pid=69328) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 574.936263] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 574.936263] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Getting list of instances from cluster (obj){ [ 574.936263] env[69328]: value = "domain-c8" [ 574.936263] env[69328]: _type = "ClusterComputeResource" [ 574.936263] env[69328]: } {{(pid=69328) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 574.937514] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7bcc333-1206-456c-935d-f494077c97f5 
{{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.947616] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Got total of 0 instances {{(pid=69328) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 580.477103] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Acquiring lock "88f9f0c2-0c55-45bf-a494-8f1ee4922443" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 580.477923] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Lock "88f9f0c2-0c55-45bf-a494-8f1ee4922443" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 580.979948] env[69328]: DEBUG nova.compute.manager [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 581.012658] env[69328]: DEBUG oslo_concurrency.lockutils [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Acquiring lock "676173ee-8001-48c6-bd28-09130f6dd99a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 581.014501] env[69328]: DEBUG oslo_concurrency.lockutils [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Lock "676173ee-8001-48c6-bd28-09130f6dd99a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 581.314049] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Acquiring lock "49a668a7-5967-46a9-823f-7f613d34d152" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 581.314272] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Lock "49a668a7-5967-46a9-823f-7f613d34d152" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 581.514387] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 581.514669] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 581.516895] env[69328]: INFO nova.compute.claims [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 581.519669] env[69328]: DEBUG nova.compute.manager [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 581.797229] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquiring lock "230c6278-65af-4f5d-b817-0b695086c29d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 581.798811] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lock "230c6278-65af-4f5d-b817-0b695086c29d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 581.819206] env[69328]: DEBUG nova.compute.manager [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Starting instance... 
{{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 581.833146] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquiring lock "50b84adc-5ff3-4a1e-a09f-5c96daef9b87" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 581.833146] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lock "50b84adc-5ff3-4a1e-a09f-5c96daef9b87" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 582.073636] env[69328]: DEBUG oslo_concurrency.lockutils [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 582.301256] env[69328]: DEBUG nova.compute.manager [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 582.337887] env[69328]: DEBUG nova.compute.manager [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Starting instance... 
{{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 582.354074] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 582.644754] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a66c062a-52d5-48db-88ee-3548e230b38e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.656307] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee74c18a-2ce4-42b5-a00e-82f8fe619251 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.663970] env[69328]: DEBUG oslo_concurrency.lockutils [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Acquiring lock "caba3b5c-db15-4de6-8d3d-41f6751f1b83" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 582.664318] env[69328]: DEBUG oslo_concurrency.lockutils [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Lock "caba3b5c-db15-4de6-8d3d-41f6751f1b83" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 582.701716] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2040c538-2a6b-43da-bad5-864b2a7d0d38 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.713142] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81ed9582-ca5f-4869-ae6b-98e25bea0068 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.727144] env[69328]: DEBUG nova.compute.provider_tree [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 582.833824] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 582.864454] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 
tempest-MultipleCreateTestJSON-1150672461-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 583.166917] env[69328]: DEBUG nova.compute.manager [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 583.234088] env[69328]: DEBUG nova.scheduler.client.report [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 583.613511] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquiring lock "d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 583.613734] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Lock "d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 583.699700] env[69328]: DEBUG oslo_concurrency.lockutils [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 583.739710] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.225s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 583.740394] env[69328]: DEBUG nova.compute.manager [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Start building networks asynchronously for 
instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 583.744010] env[69328]: DEBUG oslo_concurrency.lockutils [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.670s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 583.745414] env[69328]: INFO nova.compute.claims [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 583.831541] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Acquiring lock "d97dc6d5-e55f-4b9e-91e6-cfdea82f5236" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 583.831757] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Lock "d97dc6d5-e55f-4b9e-91e6-cfdea82f5236" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 584.116256] env[69328]: DEBUG nova.compute.manager [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 584.249674] env[69328]: DEBUG nova.compute.utils [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 584.258698] env[69328]: DEBUG nova.compute.manager [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 584.259053] env[69328]: DEBUG nova.network.neutron [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 584.333779] env[69328]: DEBUG nova.compute.manager [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 584.653180] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 584.767902] env[69328]: DEBUG nova.compute.manager [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 584.837320] env[69328]: DEBUG nova.policy [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bcffb893af0140f8a5deac1f313bade3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2da97117081d44cab074540e0b39d0e5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 584.866676] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 584.972692] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3dac445-8715-47f8-abb1-cbe4aa9fbb04 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.983977] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd725a80-bc20-4cfa-b549-85868e40e647 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.026333] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8efedb91-4d65-4d40-b066-bd9157c66c52 
{{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.035973] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86ec4482-f842-43ed-8b28-5a427f73da7b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.051544] env[69328]: DEBUG nova.compute.provider_tree [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 585.515953] env[69328]: DEBUG nova.network.neutron [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Successfully created port: f00ec7b2-0d01-4e8c-b30b-50314520c094 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 585.559235] env[69328]: DEBUG nova.scheduler.client.report [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 585.786034] env[69328]: DEBUG nova.compute.manager [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 585.834739] env[69328]: DEBUG nova.virt.hardware [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 585.835019] env[69328]: DEBUG nova.virt.hardware [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 585.835173] env[69328]: DEBUG nova.virt.hardware [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 585.835372] env[69328]: DEBUG nova.virt.hardware [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 585.835484] env[69328]: DEBUG nova.virt.hardware [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 585.835624] env[69328]: DEBUG nova.virt.hardware [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 585.835850] env[69328]: DEBUG nova.virt.hardware [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 585.836120] env[69328]: DEBUG nova.virt.hardware [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 
tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 585.836548] env[69328]: DEBUG nova.virt.hardware [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 585.836713] env[69328]: DEBUG nova.virt.hardware [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 585.837772] env[69328]: DEBUG nova.virt.hardware [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 585.837934] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62f76e14-1815-43d6-99da-33c0b0c0db43 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.849974] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7e03b9d-1a2d-4ec0-aba3-0c34d1b68faa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.868740] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d84a40f0-cc5e-4868-9103-32a35240e3f6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.996169] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "ed10d511-dbed-4884-8ac6-f737173f62c5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 585.996426] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "ed10d511-dbed-4884-8ac6-f737173f62c5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 586.068159] env[69328]: DEBUG oslo_concurrency.lockutils [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.323s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 586.068159] env[69328]: DEBUG 
nova.compute.manager [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 586.073394] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.718s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 586.075198] env[69328]: INFO nova.compute.claims [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 586.429172] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Acquiring lock "d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 586.429419] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Lock "d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 586.500735] env[69328]: DEBUG nova.compute.manager [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 586.580160] env[69328]: DEBUG nova.compute.utils [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 586.584230] env[69328]: DEBUG nova.compute.manager [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 586.584477] env[69328]: DEBUG nova.network.neutron [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 586.668140] env[69328]: DEBUG nova.policy [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'baaa0ef287264c0f8d2ff4dc7ad5b69c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2e75682b9ac94ac39fc889c033d8e9b9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 586.937655] env[69328]: DEBUG nova.compute.manager [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 587.027832] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 587.048145] env[69328]: DEBUG nova.network.neutron [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Successfully created port: 4764934e-430e-4e3b-a834-5bc38771987e {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 587.085655] env[69328]: DEBUG nova.compute.manager [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Start building block device mappings for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 587.316576] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84b41a5e-84ee-4ae3-a6ce-7626dd47dfde {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.328708] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ef07fe3-69d5-4f61-9637-0c8ac1a7c2c8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.363039] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b791cd9-0757-42a7-bca1-9e9b135e6383 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.371079] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e902414-df84-4d29-8f89-a2f1739e17e8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.387758] env[69328]: DEBUG nova.compute.provider_tree [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 587.461353] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 587.890261] env[69328]: DEBUG nova.scheduler.client.report [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 588.007621] env[69328]: DEBUG nova.network.neutron [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Successfully updated port: f00ec7b2-0d01-4e8c-b30b-50314520c094 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 588.098416] env[69328]: DEBUG nova.compute.manager [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 588.136775] env[69328]: DEBUG nova.virt.hardware [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 588.136775] env[69328]: DEBUG nova.virt.hardware [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 588.137130] env[69328]: DEBUG nova.virt.hardware [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 588.137130] env[69328]: DEBUG nova.virt.hardware [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 588.137243] env[69328]: DEBUG nova.virt.hardware [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 588.137454] env[69328]: DEBUG nova.virt.hardware [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 588.137571] env[69328]: DEBUG nova.virt.hardware [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 588.137729] env[69328]: DEBUG nova.virt.hardware [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 588.137891] env[69328]: DEBUG nova.virt.hardware [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 588.138060] env[69328]: DEBUG nova.virt.hardware [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 588.138744] env[69328]: DEBUG nova.virt.hardware [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 588.139327] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dd6bb3b-0ee6-49f8-ac3c-9348b0e5b186 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.150574] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2684f13-405d-4bf5-afd1-38de6a81cc19 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.395796] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.324s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 588.396338] env[69328]: DEBUG nova.compute.manager [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 588.402373] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.569s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 588.404659] env[69328]: INFO nova.compute.claims [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 588.510180] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Acquiring lock "refresh_cache-88f9f0c2-0c55-45bf-a494-8f1ee4922443" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 588.510180] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Acquired lock "refresh_cache-88f9f0c2-0c55-45bf-a494-8f1ee4922443" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 588.510261] env[69328]: DEBUG nova.network.neutron [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 588.909424] env[69328]: DEBUG nova.compute.utils [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 588.915198] env[69328]: DEBUG nova.compute.manager [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 588.915383] env[69328]: DEBUG nova.network.neutron [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 589.099117] env[69328]: DEBUG nova.network.neutron [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 589.137018] env[69328]: DEBUG nova.policy [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7edb3885beca4231b23321a02895706b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e1712dd10dc849749ef1757ae27cf8e9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 589.204274] env[69328]: DEBUG nova.network.neutron [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Successfully updated port: 4764934e-430e-4e3b-a834-5bc38771987e {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 589.416561] env[69328]: DEBUG nova.compute.manager [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 589.605195] env[69328]: DEBUG nova.network.neutron [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Updating instance_info_cache with network_info: [{"id": "f00ec7b2-0d01-4e8c-b30b-50314520c094", "address": "fa:16:3e:4c:b2:68", "network": {"id": "a1617aa6-27e2-4648-ad66-bff29c8d3d2a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-897974516-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2da97117081d44cab074540e0b39d0e5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98f447de-d71e-41ef-bc37-ed97b4a1f58f", "external-id": "nsx-vlan-transportzone-904", "segmentation_id": 904, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf00ec7b2-0d", "ovs_interfaceid": "f00ec7b2-0d01-4e8c-b30b-50314520c094", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 589.638048] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1fc7fef-70a4-4bc7-b8df-4f7d29d87ec0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.660397] env[69328]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-873cc838-a701-492c-ae04-ea408d77d80a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.668853] env[69328]: DEBUG nova.network.neutron [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Successfully created port: 6cff9499-a9e2-4bf0-8d52-582ddcfd6392 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 589.708336] env[69328]: DEBUG oslo_concurrency.lockutils [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Acquiring lock "refresh_cache-676173ee-8001-48c6-bd28-09130f6dd99a" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 589.708551] env[69328]: DEBUG oslo_concurrency.lockutils [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Acquired lock "refresh_cache-676173ee-8001-48c6-bd28-09130f6dd99a" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 589.708675] env[69328]: DEBUG nova.network.neutron [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 589.710972] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd0f8898-57e7-4e46-ad79-80f11ac4289b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.719857] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3220e0d9-ee56-4492-8c0f-d1c2a5bceb5e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.739760] env[69328]: DEBUG nova.compute.provider_tree [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 589.837558] env[69328]: DEBUG nova.compute.manager [req-5e7408d6-4150-494d-88bd-27d4c9ca9305 req-ae38739c-af3a-4b39-af81-41bd72383a54 service nova] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Received event network-vif-plugged-f00ec7b2-0d01-4e8c-b30b-50314520c094 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 589.837846] env[69328]: DEBUG oslo_concurrency.lockutils [req-5e7408d6-4150-494d-88bd-27d4c9ca9305 req-ae38739c-af3a-4b39-af81-41bd72383a54 service nova] Acquiring lock "88f9f0c2-0c55-45bf-a494-8f1ee4922443-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 589.838066] env[69328]: DEBUG oslo_concurrency.lockutils [req-5e7408d6-4150-494d-88bd-27d4c9ca9305 
req-ae38739c-af3a-4b39-af81-41bd72383a54 service nova] Lock "88f9f0c2-0c55-45bf-a494-8f1ee4922443-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 589.838240] env[69328]: DEBUG oslo_concurrency.lockutils [req-5e7408d6-4150-494d-88bd-27d4c9ca9305 req-ae38739c-af3a-4b39-af81-41bd72383a54 service nova] Lock "88f9f0c2-0c55-45bf-a494-8f1ee4922443-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 589.838695] env[69328]: DEBUG nova.compute.manager [req-5e7408d6-4150-494d-88bd-27d4c9ca9305 req-ae38739c-af3a-4b39-af81-41bd72383a54 service nova] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] No waiting events found dispatching network-vif-plugged-f00ec7b2-0d01-4e8c-b30b-50314520c094 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 589.838891] env[69328]: WARNING nova.compute.manager [req-5e7408d6-4150-494d-88bd-27d4c9ca9305 req-ae38739c-af3a-4b39-af81-41bd72383a54 service nova] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Received unexpected event network-vif-plugged-f00ec7b2-0d01-4e8c-b30b-50314520c094 for instance with vm_state building and task_state spawning. [ 590.111621] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Releasing lock "refresh_cache-88f9f0c2-0c55-45bf-a494-8f1ee4922443" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 590.112071] env[69328]: DEBUG nova.compute.manager [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Instance network_info: |[{"id": "f00ec7b2-0d01-4e8c-b30b-50314520c094", "address": "fa:16:3e:4c:b2:68", "network": {"id": "a1617aa6-27e2-4648-ad66-bff29c8d3d2a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-897974516-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2da97117081d44cab074540e0b39d0e5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98f447de-d71e-41ef-bc37-ed97b4a1f58f", "external-id": "nsx-vlan-transportzone-904", "segmentation_id": 904, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf00ec7b2-0d", "ovs_interfaceid": "f00ec7b2-0d01-4e8c-b30b-50314520c094", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 590.112547] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 
tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4c:b2:68', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '98f447de-d71e-41ef-bc37-ed97b4a1f58f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f00ec7b2-0d01-4e8c-b30b-50314520c094', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 590.129848] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 590.130188] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2d19e89b-2adf-41ef-ac21-49f26fb8a49b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.146574] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Created folder: OpenStack in parent group-v4. [ 590.146574] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Creating folder: Project (2da97117081d44cab074540e0b39d0e5). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 590.147806] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d4e369e9-e8fe-4ee5-a9da-cdb68ed7aaf6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.158092] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Created folder: Project (2da97117081d44cab074540e0b39d0e5) in parent group-v653649. [ 590.158300] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Creating folder: Instances. Parent ref: group-v653650. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 590.158538] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-46f67ca3-3877-4632-bc41-cc72649f68e9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.173371] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Created folder: Instances in parent group-v653650. 
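(Editor's illustrative aside, not the driver's actual code: the records above show the vmwareapi layer building its "OpenStack" -> "Project (...)" -> "Instances" folder chain by invoking Folder.CreateFolder through an oslo.vmware session, and the records just below submit a CreateVM_Task and poll it until completion. A minimal sketch of issuing such calls with oslo.vmware, assuming placeholder endpoint, credentials, and object references, might look like this.)

```python
from oslo_vmware import api as vmware_api

# Placeholder vCenter endpoint and credentials (assumptions, not taken from this log).
session = vmware_api.VMwareAPISession(
    'vcenter.example.org', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Non-task SOAP methods return their result directly; CreateFolder is invoked
# against a parent Folder managed-object reference and returns the new folder.
root = session.vim.service_content.rootFolder
os_folder = session.invoke_api(session.vim, 'CreateFolder', root,
                               name='OpenStack')

# Methods whose names end in _Task return a task reference instead.
# wait_for_task() polls that task (producing "Waiting for the task ..." /
# "progress is ..%" style records) and returns the task info on success, e.g.:
#   task = session.invoke_api(session.vim, 'CreateVM_Task', vm_folder,
#                             config=config_spec, pool=resource_pool)
#   task_info = session.wait_for_task(task)
# config_spec, vm_folder and resource_pool above are hypothetical placeholders.
```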
[ 590.173371] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 590.173371] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 590.173371] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c6c574d2-8608-465f-947e-d5daf6d6ef96 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.194513] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 590.194513] env[69328]: value = "task-3272636" [ 590.194513] env[69328]: _type = "Task" [ 590.194513] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.202855] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272636, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.242659] env[69328]: DEBUG nova.scheduler.client.report [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 590.265421] env[69328]: DEBUG nova.network.neutron [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 590.431304] env[69328]: DEBUG nova.compute.manager [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 590.463553] env[69328]: DEBUG nova.virt.hardware [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 590.464506] env[69328]: DEBUG nova.virt.hardware [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 590.464506] env[69328]: DEBUG nova.virt.hardware [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 590.464506] env[69328]: DEBUG nova.virt.hardware [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 590.464506] env[69328]: DEBUG nova.virt.hardware [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 590.464827] env[69328]: DEBUG nova.virt.hardware [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 590.464827] env[69328]: DEBUG nova.virt.hardware [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 590.464921] env[69328]: DEBUG nova.virt.hardware [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 590.465099] env[69328]: DEBUG nova.virt.hardware [None 
req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 590.465259] env[69328]: DEBUG nova.virt.hardware [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 590.465422] env[69328]: DEBUG nova.virt.hardware [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 590.466441] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b04284cc-a1fa-4d92-a745-aec5e8ac722b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.470189] env[69328]: DEBUG nova.network.neutron [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Updating instance_info_cache with network_info: [{"id": "4764934e-430e-4e3b-a834-5bc38771987e", "address": "fa:16:3e:a6:f2:45", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.219", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4764934e-43", "ovs_interfaceid": "4764934e-430e-4e3b-a834-5bc38771987e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 590.477568] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c38c9d10-4b34-4d85-94c6-96e92258c21f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.643968] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 590.644320] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69328) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 590.644528] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 590.644736] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 590.644931] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 590.645147] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 590.645393] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 590.645523] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69328) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 590.645623] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager.update_available_resource {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 590.707797] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272636, 'name': CreateVM_Task, 'duration_secs': 0.361357} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.707797] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 590.721321] env[69328]: DEBUG oslo_vmware.service [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7521b926-0810-4588-9264-c62562cbc961 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.727917] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 590.728120] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 590.728788] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 590.729068] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d49f147-2e07-448a-b65a-b1d70ed1279e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.736167] env[69328]: DEBUG oslo_vmware.api [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Waiting for the task: (returnval){ [ 590.736167] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52dd6b4d-4eef-f78b-e7cf-90a083ebd44d" [ 590.736167] env[69328]: _type = "Task" [ 590.736167] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.747008] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.345s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 590.749489] env[69328]: DEBUG nova.compute.manager [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 590.752036] env[69328]: DEBUG oslo_vmware.api [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52dd6b4d-4eef-f78b-e7cf-90a083ebd44d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.752507] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.889s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 590.753919] env[69328]: INFO nova.compute.claims [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 590.972573] env[69328]: DEBUG oslo_concurrency.lockutils [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Releasing lock "refresh_cache-676173ee-8001-48c6-bd28-09130f6dd99a" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 590.973043] env[69328]: DEBUG nova.compute.manager [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Instance network_info: |[{"id": "4764934e-430e-4e3b-a834-5bc38771987e", "address": "fa:16:3e:a6:f2:45", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.219", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4764934e-43", "ovs_interfaceid": "4764934e-430e-4e3b-a834-5bc38771987e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 590.973474] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a6:f2:45', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a8b99a46-3e7f-4ef1-9e45-58e6cd17f210', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4764934e-430e-4e3b-a834-5bc38771987e', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 590.983274] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Creating folder: Project (2e75682b9ac94ac39fc889c033d8e9b9). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 590.983760] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e1f31b2b-3c3f-40a8-a394-1a1d61bf5d02 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.998953] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Created folder: Project (2e75682b9ac94ac39fc889c033d8e9b9) in parent group-v653649. [ 590.999283] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Creating folder: Instances. Parent ref: group-v653653. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 590.999687] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fdf4b45d-3bfb-4a96-9e99-e3115458eb15 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.018447] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Created folder: Instances in parent group-v653653. [ 591.018823] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 591.019151] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 591.019506] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e68170bd-7aa0-49cd-80ff-ec91410e76bb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.046199] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 591.046199] env[69328]: value = "task-3272639" [ 591.046199] env[69328]: _type = "Task" [ 591.046199] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.054472] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272639, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.153336] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 591.245763] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 591.246048] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 591.246362] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 591.246433] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 591.247007] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Creating directory with path 
[datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 591.247095] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9d1d69ad-e9d2-47ed-b7d5-8829b7682264 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.254485] env[69328]: DEBUG nova.compute.utils [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 591.256865] env[69328]: DEBUG nova.compute.manager [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 591.257058] env[69328]: DEBUG nova.network.neutron [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 591.264018] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 591.264018] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 591.264018] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ecedb68-df8b-4ac4-9a1d-fadf9d435e32 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.269255] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b87a68df-f92c-4d44-8386-769b2de18c8f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.275493] env[69328]: DEBUG oslo_vmware.api [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Waiting for the task: (returnval){ [ 591.275493] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]522f6747-d28b-c9e1-a451-a7f135280716" [ 591.275493] env[69328]: _type = "Task" [ 591.275493] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.284917] env[69328]: DEBUG oslo_vmware.api [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]522f6747-d28b-c9e1-a451-a7f135280716, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.339889] env[69328]: DEBUG nova.policy [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5110ae0cc422450ca918d256fe8c1659', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '435a67cec87842678e6c1c354ab09bd7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 591.352037] env[69328]: DEBUG nova.network.neutron [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Successfully updated port: 6cff9499-a9e2-4bf0-8d52-582ddcfd6392 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 591.557289] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272639, 'name': CreateVM_Task, 'duration_secs': 0.383587} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.557685] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 591.561019] env[69328]: DEBUG oslo_concurrency.lockutils [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 591.561019] env[69328]: DEBUG oslo_concurrency.lockutils [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 591.561019] env[69328]: DEBUG oslo_concurrency.lockutils [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 591.561019] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b9549d0-3669-4536-96e9-e516e897b445 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.565085] env[69328]: DEBUG oslo_vmware.api [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Waiting for the task: (returnval){ [ 591.565085] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f608ef-8d23-923a-6ca8-0c4f2e6e645d" [ 591.565085] env[69328]: _type = "Task" [ 591.565085] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.573162] env[69328]: DEBUG oslo_vmware.api [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f608ef-8d23-923a-6ca8-0c4f2e6e645d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.756706] env[69328]: DEBUG nova.network.neutron [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Successfully created port: f2be515c-61cb-4257-b9e3-858bf3798d6d {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 591.766232] env[69328]: DEBUG nova.compute.manager [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Start building block device mappings for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 591.791840] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Preparing fetch location {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 591.791840] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Creating directory with path [datastore2] vmware_temp/4241b3ab-e1d7-4b48-9d96-1877a5fd5103/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 591.791840] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b22fd604-ae13-454a-8911-c68eafc6667e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.804094] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Created directory with path [datastore2] vmware_temp/4241b3ab-e1d7-4b48-9d96-1877a5fd5103/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 591.804317] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Fetch image to [datastore2] vmware_temp/4241b3ab-e1d7-4b48-9d96-1877a5fd5103/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/tmp-sparse.vmdk {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 591.804479] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Downloading image file data a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 to [datastore2] vmware_temp/4241b3ab-e1d7-4b48-9d96-1877a5fd5103/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/tmp-sparse.vmdk on the data store datastore2 {{(pid=69328) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 591.805298] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c35f0a86-df03-456a-a038-dc7c853d74d4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.816066] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c38a4ba-7de3-4bba-8079-c6f4a49f0545 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.827337] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f59687fa-9374-404e-8e36-73296802ca6c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.863736] env[69328]: DEBUG 
oslo_concurrency.lockutils [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Acquiring lock "refresh_cache-49a668a7-5967-46a9-823f-7f613d34d152" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 591.863736] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Acquired lock "refresh_cache-49a668a7-5967-46a9-823f-7f613d34d152" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 591.863900] env[69328]: DEBUG nova.network.neutron [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 591.868601] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c8bc6f6-c076-4713-8e1d-e91484a0dfef {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.875765] env[69328]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5ca46e8f-e8cf-4c8c-a93a-749beb40151b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.965921] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc50ddb4-b6fe-42b9-ae08-7545b76af1c8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.974039] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Downloading image file data a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 to the data store datastore2 {{(pid=69328) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 591.988474] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c972dacc-5fcc-4a62-897b-b622abb92eda {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.025539] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b61c0e9-e627-48b4-ab31-1ac46821e884 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.036914] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da905052-f0be-4f45-b2a2-9b17fb82c36c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.051839] env[69328]: DEBUG nova.compute.provider_tree [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 592.075630] env[69328]: DEBUG oslo_concurrency.lockutils [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 592.075977] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 592.076150] env[69328]: DEBUG oslo_concurrency.lockutils [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.081349] env[69328]: DEBUG oslo_vmware.rw_handles [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4241b3ab-e1d7-4b48-9d96-1877a5fd5103/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69328) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 592.434250] env[69328]: DEBUG oslo_concurrency.lockutils [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Acquiring lock "a798c3f2-ccde-488e-8a14-21f4a04f8e12" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 592.434512] env[69328]: DEBUG oslo_concurrency.lockutils [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Lock "a798c3f2-ccde-488e-8a14-21f4a04f8e12" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 592.448424] env[69328]: DEBUG nova.network.neutron [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 592.555732] env[69328]: DEBUG nova.scheduler.client.report [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 592.692175] env[69328]: DEBUG nova.network.neutron [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Updating instance_info_cache with network_info: [{"id": "6cff9499-a9e2-4bf0-8d52-582ddcfd6392", "address": "fa:16:3e:18:70:7c", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.90", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6cff9499-a9", "ovs_interfaceid": "6cff9499-a9e2-4bf0-8d52-582ddcfd6392", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 592.730584] env[69328]: DEBUG oslo_vmware.rw_handles [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Completed reading data from the image iterator. {{(pid=69328) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 592.730862] env[69328]: DEBUG oslo_vmware.rw_handles [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4241b3ab-e1d7-4b48-9d96-1877a5fd5103/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69328) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 592.777285] env[69328]: DEBUG nova.compute.manager [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 592.812462] env[69328]: DEBUG nova.virt.hardware [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 592.812830] env[69328]: DEBUG nova.virt.hardware [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 592.812885] env[69328]: DEBUG nova.virt.hardware [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 592.816214] env[69328]: DEBUG nova.virt.hardware [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 592.816214] env[69328]: DEBUG nova.virt.hardware [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 592.816214] env[69328]: DEBUG nova.virt.hardware [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 592.816214] env[69328]: DEBUG nova.virt.hardware [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 592.816214] env[69328]: DEBUG nova.virt.hardware [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 592.816428] env[69328]: DEBUG nova.virt.hardware [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 592.816428] env[69328]: DEBUG nova.virt.hardware [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 592.816793] env[69328]: DEBUG nova.virt.hardware [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 592.817525] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb0ca5ee-34d2-4b38-9123-0e644b86f6f7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.827982] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-364544ee-c931-43df-9b8c-2cbcbebefb7e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.864912] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Downloaded image file data a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 to vmware_temp/4241b3ab-e1d7-4b48-9d96-1877a5fd5103/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/tmp-sparse.vmdk on the data store datastore2 {{(pid=69328) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 592.866657] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Caching image {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 592.867029] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Copying Virtual Disk [datastore2] vmware_temp/4241b3ab-e1d7-4b48-9d96-1877a5fd5103/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/tmp-sparse.vmdk to [datastore2] vmware_temp/4241b3ab-e1d7-4b48-9d96-1877a5fd5103/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 592.868185] env[69328]: DEBUG oslo_vmware.service [-] Invoking 
VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dc2a89bc-7cfc-4a5f-b2e1-76c4d936eacd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.880885] env[69328]: DEBUG nova.compute.manager [req-347b2e19-c3c7-485d-ba38-420ac890ffca req-69b3f391-abde-431e-94d0-391bef014b6c service nova] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Received event network-changed-f00ec7b2-0d01-4e8c-b30b-50314520c094 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 592.881720] env[69328]: DEBUG nova.compute.manager [req-347b2e19-c3c7-485d-ba38-420ac890ffca req-69b3f391-abde-431e-94d0-391bef014b6c service nova] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Refreshing instance network info cache due to event network-changed-f00ec7b2-0d01-4e8c-b30b-50314520c094. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 592.881991] env[69328]: DEBUG oslo_concurrency.lockutils [req-347b2e19-c3c7-485d-ba38-420ac890ffca req-69b3f391-abde-431e-94d0-391bef014b6c service nova] Acquiring lock "refresh_cache-88f9f0c2-0c55-45bf-a494-8f1ee4922443" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.882246] env[69328]: DEBUG oslo_concurrency.lockutils [req-347b2e19-c3c7-485d-ba38-420ac890ffca req-69b3f391-abde-431e-94d0-391bef014b6c service nova] Acquired lock "refresh_cache-88f9f0c2-0c55-45bf-a494-8f1ee4922443" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 592.882330] env[69328]: DEBUG nova.network.neutron [req-347b2e19-c3c7-485d-ba38-420ac890ffca req-69b3f391-abde-431e-94d0-391bef014b6c service nova] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Refreshing network info cache for port f00ec7b2-0d01-4e8c-b30b-50314520c094 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 592.885282] env[69328]: DEBUG oslo_vmware.api [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Waiting for the task: (returnval){ [ 592.885282] env[69328]: value = "task-3272640" [ 592.885282] env[69328]: _type = "Task" [ 592.885282] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.894437] env[69328]: DEBUG oslo_vmware.api [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Task: {'id': task-3272640, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.064259] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.312s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 593.064791] env[69328]: DEBUG nova.compute.manager [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 593.067681] env[69328]: DEBUG oslo_concurrency.lockutils [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.368s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 593.069123] env[69328]: INFO nova.compute.claims [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 593.195506] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Releasing lock "refresh_cache-49a668a7-5967-46a9-823f-7f613d34d152" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 593.195902] env[69328]: DEBUG nova.compute.manager [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Instance network_info: |[{"id": "6cff9499-a9e2-4bf0-8d52-582ddcfd6392", "address": "fa:16:3e:18:70:7c", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.90", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6cff9499-a9", "ovs_interfaceid": "6cff9499-a9e2-4bf0-8d52-582ddcfd6392", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 
593.196444] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:18:70:7c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a8b99a46-3e7f-4ef1-9e45-58e6cd17f210', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6cff9499-a9e2-4bf0-8d52-582ddcfd6392', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 593.207023] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Creating folder: Project (e1712dd10dc849749ef1757ae27cf8e9). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 593.207023] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c09ed5ed-3ec3-41e8-abff-ebcbe99e94fe {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.216744] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Created folder: Project (e1712dd10dc849749ef1757ae27cf8e9) in parent group-v653649. [ 593.216953] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Creating folder: Instances. Parent ref: group-v653656. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 593.217217] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2eac0edd-15fe-469e-a5d5-fd7450eea576 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.230254] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Created folder: Instances in parent group-v653656. [ 593.230254] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 593.230254] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 593.230254] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f70a140d-c5ed-4f84-b272-b0a1350fdcaa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.253496] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 593.253496] env[69328]: value = "task-3272643" [ 593.253496] env[69328]: _type = "Task" [ 593.253496] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.261896] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272643, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.406401] env[69328]: DEBUG oslo_vmware.api [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Task: {'id': task-3272640, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.573816] env[69328]: DEBUG nova.compute.utils [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 593.583426] env[69328]: DEBUG nova.compute.manager [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 593.583426] env[69328]: DEBUG nova.network.neutron [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 593.671177] env[69328]: DEBUG nova.policy [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5110ae0cc422450ca918d256fe8c1659', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '435a67cec87842678e6c1c354ab09bd7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 593.757485] env[69328]: DEBUG nova.network.neutron [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Successfully updated port: f2be515c-61cb-4257-b9e3-858bf3798d6d {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 593.772776] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272643, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.903675] env[69328]: DEBUG oslo_vmware.api [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Task: {'id': task-3272640, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.747563} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 593.904155] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Copied Virtual Disk [datastore2] vmware_temp/4241b3ab-e1d7-4b48-9d96-1877a5fd5103/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/tmp-sparse.vmdk to [datastore2] vmware_temp/4241b3ab-e1d7-4b48-9d96-1877a5fd5103/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 593.904487] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Deleting the datastore file [datastore2] vmware_temp/4241b3ab-e1d7-4b48-9d96-1877a5fd5103/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/tmp-sparse.vmdk {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 593.905278] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-37000e0c-a318-4a8f-9f92-8303298259a0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.914515] env[69328]: DEBUG oslo_vmware.api [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Waiting for the task: (returnval){ [ 593.914515] env[69328]: value = "task-3272644" [ 593.914515] env[69328]: _type = "Task" [ 593.914515] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.924786] env[69328]: DEBUG oslo_vmware.api [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Task: {'id': task-3272644, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.001964] env[69328]: DEBUG nova.network.neutron [req-347b2e19-c3c7-485d-ba38-420ac890ffca req-69b3f391-abde-431e-94d0-391bef014b6c service nova] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Updated VIF entry in instance network info cache for port f00ec7b2-0d01-4e8c-b30b-50314520c094. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 594.001964] env[69328]: DEBUG nova.network.neutron [req-347b2e19-c3c7-485d-ba38-420ac890ffca req-69b3f391-abde-431e-94d0-391bef014b6c service nova] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Updating instance_info_cache with network_info: [{"id": "f00ec7b2-0d01-4e8c-b30b-50314520c094", "address": "fa:16:3e:4c:b2:68", "network": {"id": "a1617aa6-27e2-4648-ad66-bff29c8d3d2a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-897974516-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2da97117081d44cab074540e0b39d0e5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98f447de-d71e-41ef-bc37-ed97b4a1f58f", "external-id": "nsx-vlan-transportzone-904", "segmentation_id": 904, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf00ec7b2-0d", "ovs_interfaceid": "f00ec7b2-0d01-4e8c-b30b-50314520c094", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 594.081386] env[69328]: DEBUG nova.compute.manager [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 594.271730] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquiring lock "refresh_cache-230c6278-65af-4f5d-b817-0b695086c29d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 594.271730] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquired lock "refresh_cache-230c6278-65af-4f5d-b817-0b695086c29d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 594.271730] env[69328]: DEBUG nova.network.neutron [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 594.272308] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272643, 'name': CreateVM_Task, 'duration_secs': 0.52796} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.272729] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 594.273449] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 594.273796] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 594.274068] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 594.277637] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42cee575-b3c6-4fd8-9f87-78e452dfcb6d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.280445] env[69328]: DEBUG nova.network.neutron [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Successfully created port: d79088e2-4f2b-49c3-bb85-8d8c7c108a8e {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 594.287624] env[69328]: DEBUG oslo_vmware.api [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Waiting for the task: (returnval){ [ 594.287624] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52fbe95a-d056-8bb1-8249-283b91459300" [ 594.287624] env[69328]: _type = "Task" [ 594.287624] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.304388] env[69328]: DEBUG oslo_vmware.api [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52fbe95a-d056-8bb1-8249-283b91459300, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.319788] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73166403-4705-484c-9c6e-7f365f011f39 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.329257] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-984b15b2-9bf2-4fdf-a705-25bc0f096c9f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.366544] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec3149b9-eec3-4c53-a159-c907518139ef {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.375923] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05dfb7bb-b69b-4bd0-8e7a-276d464def89 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.391556] env[69328]: DEBUG nova.compute.provider_tree [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 594.425409] env[69328]: DEBUG oslo_vmware.api [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Task: {'id': task-3272644, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.022528} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.425781] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 594.425868] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Moving file from [datastore2] vmware_temp/4241b3ab-e1d7-4b48-9d96-1877a5fd5103/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 to [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318. {{(pid=69328) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 594.426134] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-b2d04be7-a695-46c3-a61c-ad35b743b3b8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.433974] env[69328]: DEBUG oslo_vmware.api [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Waiting for the task: (returnval){ [ 594.433974] env[69328]: value = "task-3272645" [ 594.433974] env[69328]: _type = "Task" [ 594.433974] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.443984] env[69328]: DEBUG oslo_vmware.api [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Task: {'id': task-3272645, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.508447] env[69328]: DEBUG oslo_concurrency.lockutils [req-347b2e19-c3c7-485d-ba38-420ac890ffca req-69b3f391-abde-431e-94d0-391bef014b6c service nova] Releasing lock "refresh_cache-88f9f0c2-0c55-45bf-a494-8f1ee4922443" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 594.508447] env[69328]: DEBUG nova.compute.manager [req-347b2e19-c3c7-485d-ba38-420ac890ffca req-69b3f391-abde-431e-94d0-391bef014b6c service nova] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Received event network-vif-plugged-4764934e-430e-4e3b-a834-5bc38771987e {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 594.508447] env[69328]: DEBUG oslo_concurrency.lockutils [req-347b2e19-c3c7-485d-ba38-420ac890ffca req-69b3f391-abde-431e-94d0-391bef014b6c service nova] Acquiring lock "676173ee-8001-48c6-bd28-09130f6dd99a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 594.509401] env[69328]: DEBUG oslo_concurrency.lockutils [req-347b2e19-c3c7-485d-ba38-420ac890ffca req-69b3f391-abde-431e-94d0-391bef014b6c service nova] Lock "676173ee-8001-48c6-bd28-09130f6dd99a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 594.509401] env[69328]: DEBUG oslo_concurrency.lockutils [req-347b2e19-c3c7-485d-ba38-420ac890ffca req-69b3f391-abde-431e-94d0-391bef014b6c service nova] Lock "676173ee-8001-48c6-bd28-09130f6dd99a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 594.510229] env[69328]: DEBUG nova.compute.manager [req-347b2e19-c3c7-485d-ba38-420ac890ffca req-69b3f391-abde-431e-94d0-391bef014b6c service nova] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] No waiting events found dispatching network-vif-plugged-4764934e-430e-4e3b-a834-5bc38771987e {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 594.510229] env[69328]: WARNING nova.compute.manager [req-347b2e19-c3c7-485d-ba38-420ac890ffca req-69b3f391-abde-431e-94d0-391bef014b6c service nova] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Received unexpected event network-vif-plugged-4764934e-430e-4e3b-a834-5bc38771987e for instance with vm_state building and task_state spawning. 
[ 594.510229] env[69328]: DEBUG nova.compute.manager [req-347b2e19-c3c7-485d-ba38-420ac890ffca req-69b3f391-abde-431e-94d0-391bef014b6c service nova] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Received event network-changed-4764934e-430e-4e3b-a834-5bc38771987e {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 594.510229] env[69328]: DEBUG nova.compute.manager [req-347b2e19-c3c7-485d-ba38-420ac890ffca req-69b3f391-abde-431e-94d0-391bef014b6c service nova] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Refreshing instance network info cache due to event network-changed-4764934e-430e-4e3b-a834-5bc38771987e. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 594.510362] env[69328]: DEBUG oslo_concurrency.lockutils [req-347b2e19-c3c7-485d-ba38-420ac890ffca req-69b3f391-abde-431e-94d0-391bef014b6c service nova] Acquiring lock "refresh_cache-676173ee-8001-48c6-bd28-09130f6dd99a" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 594.510439] env[69328]: DEBUG oslo_concurrency.lockutils [req-347b2e19-c3c7-485d-ba38-420ac890ffca req-69b3f391-abde-431e-94d0-391bef014b6c service nova] Acquired lock "refresh_cache-676173ee-8001-48c6-bd28-09130f6dd99a" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 594.510784] env[69328]: DEBUG nova.network.neutron [req-347b2e19-c3c7-485d-ba38-420ac890ffca req-69b3f391-abde-431e-94d0-391bef014b6c service nova] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Refreshing network info cache for port 4764934e-430e-4e3b-a834-5bc38771987e {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 594.645018] env[69328]: DEBUG oslo_concurrency.lockutils [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Acquiring lock "26feb2d1-ff64-4a13-af83-b6d5fe4348e1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 594.645351] env[69328]: DEBUG oslo_concurrency.lockutils [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Lock "26feb2d1-ff64-4a13-af83-b6d5fe4348e1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 594.798814] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 594.799146] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 594.799377] env[69328]: DEBUG oslo_concurrency.lockutils [None 
req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 594.806903] env[69328]: DEBUG nova.compute.manager [req-37763ab6-fce1-4ed1-adb0-668c39ab7451 req-fea03303-d338-4612-a4b9-c8ccc512434c service nova] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Received event network-vif-plugged-f2be515c-61cb-4257-b9e3-858bf3798d6d {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 594.807129] env[69328]: DEBUG oslo_concurrency.lockutils [req-37763ab6-fce1-4ed1-adb0-668c39ab7451 req-fea03303-d338-4612-a4b9-c8ccc512434c service nova] Acquiring lock "230c6278-65af-4f5d-b817-0b695086c29d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 594.807421] env[69328]: DEBUG oslo_concurrency.lockutils [req-37763ab6-fce1-4ed1-adb0-668c39ab7451 req-fea03303-d338-4612-a4b9-c8ccc512434c service nova] Lock "230c6278-65af-4f5d-b817-0b695086c29d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 594.807509] env[69328]: DEBUG oslo_concurrency.lockutils [req-37763ab6-fce1-4ed1-adb0-668c39ab7451 req-fea03303-d338-4612-a4b9-c8ccc512434c service nova] Lock "230c6278-65af-4f5d-b817-0b695086c29d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 594.807665] env[69328]: DEBUG nova.compute.manager [req-37763ab6-fce1-4ed1-adb0-668c39ab7451 req-fea03303-d338-4612-a4b9-c8ccc512434c service nova] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] No waiting events found dispatching network-vif-plugged-f2be515c-61cb-4257-b9e3-858bf3798d6d {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 594.808237] env[69328]: WARNING nova.compute.manager [req-37763ab6-fce1-4ed1-adb0-668c39ab7451 req-fea03303-d338-4612-a4b9-c8ccc512434c service nova] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Received unexpected event network-vif-plugged-f2be515c-61cb-4257-b9e3-858bf3798d6d for instance with vm_state building and task_state spawning. [ 594.854964] env[69328]: DEBUG nova.network.neutron [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 594.895999] env[69328]: DEBUG nova.scheduler.client.report [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 594.947562] env[69328]: DEBUG oslo_vmware.api [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Task: {'id': task-3272645, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.029403} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.948359] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] File moved {{(pid=69328) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 594.948359] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Cleaning up location [datastore2] vmware_temp/4241b3ab-e1d7-4b48-9d96-1877a5fd5103 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 594.948359] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Deleting the datastore file [datastore2] vmware_temp/4241b3ab-e1d7-4b48-9d96-1877a5fd5103 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 594.948855] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a92b7bf3-4bda-40d4-a3a0-789826276db4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.956084] env[69328]: DEBUG oslo_vmware.api [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Waiting for the task: (returnval){ [ 594.956084] env[69328]: value = "task-3272646" [ 594.956084] env[69328]: _type = "Task" [ 594.956084] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.968721] env[69328]: DEBUG oslo_vmware.api [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Task: {'id': task-3272646, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.099447] env[69328]: DEBUG nova.compute.manager [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 595.171815] env[69328]: DEBUG nova.network.neutron [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Updating instance_info_cache with network_info: [{"id": "f2be515c-61cb-4257-b9e3-858bf3798d6d", "address": "fa:16:3e:2f:c7:3f", "network": {"id": "a4231ba3-2b54-4dd9-82bc-772b0823748c", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-322499105-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "435a67cec87842678e6c1c354ab09bd7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "195e328b-e41a-49f5-9e51-546b8ea8ceba", "external-id": "nsx-vlan-transportzone-735", "segmentation_id": 735, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2be515c-61", "ovs_interfaceid": "f2be515c-61cb-4257-b9e3-858bf3798d6d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 595.254600] env[69328]: DEBUG nova.virt.hardware [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 595.254871] env[69328]: DEBUG nova.virt.hardware [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 595.255071] env[69328]: DEBUG nova.virt.hardware [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 
tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 595.255306] env[69328]: DEBUG nova.virt.hardware [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 595.255455] env[69328]: DEBUG nova.virt.hardware [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 595.255616] env[69328]: DEBUG nova.virt.hardware [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 595.256044] env[69328]: DEBUG nova.virt.hardware [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 595.256302] env[69328]: DEBUG nova.virt.hardware [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 595.256413] env[69328]: DEBUG nova.virt.hardware [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 595.256587] env[69328]: DEBUG nova.virt.hardware [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 595.256759] env[69328]: DEBUG nova.virt.hardware [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 595.258038] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61cd340a-cd5d-4c98-bcac-8b5d0c8d188c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.268065] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55aab33d-e989-4d15-823e-5f8201897abe {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.397916] env[69328]: DEBUG nova.network.neutron 
[req-347b2e19-c3c7-485d-ba38-420ac890ffca req-69b3f391-abde-431e-94d0-391bef014b6c service nova] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Updated VIF entry in instance network info cache for port 4764934e-430e-4e3b-a834-5bc38771987e. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 595.397916] env[69328]: DEBUG nova.network.neutron [req-347b2e19-c3c7-485d-ba38-420ac890ffca req-69b3f391-abde-431e-94d0-391bef014b6c service nova] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Updating instance_info_cache with network_info: [{"id": "4764934e-430e-4e3b-a834-5bc38771987e", "address": "fa:16:3e:a6:f2:45", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.219", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4764934e-43", "ovs_interfaceid": "4764934e-430e-4e3b-a834-5bc38771987e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 595.401517] env[69328]: DEBUG oslo_concurrency.lockutils [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.334s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 595.402031] env[69328]: DEBUG nova.compute.manager [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 595.405365] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.752s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 595.406655] env[69328]: INFO nova.compute.claims [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 595.474759] env[69328]: DEBUG oslo_vmware.api [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Task: {'id': task-3272646, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.029582} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.476447] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 595.476876] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dfb62b14-f032-4a8c-b905-9fded50a9cca {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.483607] env[69328]: DEBUG oslo_vmware.api [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Waiting for the task: (returnval){ [ 595.483607] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52037838-2ae7-db42-34c9-c02e16df7dcd" [ 595.483607] env[69328]: _type = "Task" [ 595.483607] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.501155] env[69328]: DEBUG oslo_vmware.api [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52037838-2ae7-db42-34c9-c02e16df7dcd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.675690] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Releasing lock "refresh_cache-230c6278-65af-4f5d-b817-0b695086c29d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 595.676299] env[69328]: DEBUG nova.compute.manager [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Instance network_info: |[{"id": "f2be515c-61cb-4257-b9e3-858bf3798d6d", "address": "fa:16:3e:2f:c7:3f", "network": {"id": "a4231ba3-2b54-4dd9-82bc-772b0823748c", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-322499105-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "435a67cec87842678e6c1c354ab09bd7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "195e328b-e41a-49f5-9e51-546b8ea8ceba", "external-id": "nsx-vlan-transportzone-735", "segmentation_id": 735, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2be515c-61", "ovs_interfaceid": "f2be515c-61cb-4257-b9e3-858bf3798d6d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 595.676617] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2f:c7:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '195e328b-e41a-49f5-9e51-546b8ea8ceba', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f2be515c-61cb-4257-b9e3-858bf3798d6d', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 595.687018] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Creating folder: Project (435a67cec87842678e6c1c354ab09bd7). Parent ref: group-v653649. 
{{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 595.693406] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b45d8e6c-ac08-44d6-8fd3-e42735767fd4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.704234] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Created folder: Project (435a67cec87842678e6c1c354ab09bd7) in parent group-v653649. [ 595.704406] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Creating folder: Instances. Parent ref: group-v653659. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 595.704652] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-adf758b1-a7dd-4dfe-a874-89d124f06d26 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.713616] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Created folder: Instances in parent group-v653659. [ 595.713848] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 595.714052] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 595.714257] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6db3a117-9cbc-4e43-8e37-3c8a02bb092d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.736858] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 595.736858] env[69328]: value = "task-3272649" [ 595.736858] env[69328]: _type = "Task" [ 595.736858] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.748971] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272649, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.901565] env[69328]: DEBUG oslo_concurrency.lockutils [req-347b2e19-c3c7-485d-ba38-420ac890ffca req-69b3f391-abde-431e-94d0-391bef014b6c service nova] Releasing lock "refresh_cache-676173ee-8001-48c6-bd28-09130f6dd99a" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 595.901904] env[69328]: DEBUG nova.compute.manager [req-347b2e19-c3c7-485d-ba38-420ac890ffca req-69b3f391-abde-431e-94d0-391bef014b6c service nova] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Received event network-vif-plugged-6cff9499-a9e2-4bf0-8d52-582ddcfd6392 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 595.902140] env[69328]: DEBUG oslo_concurrency.lockutils [req-347b2e19-c3c7-485d-ba38-420ac890ffca req-69b3f391-abde-431e-94d0-391bef014b6c service nova] Acquiring lock "49a668a7-5967-46a9-823f-7f613d34d152-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 595.902317] env[69328]: DEBUG oslo_concurrency.lockutils [req-347b2e19-c3c7-485d-ba38-420ac890ffca req-69b3f391-abde-431e-94d0-391bef014b6c service nova] Lock "49a668a7-5967-46a9-823f-7f613d34d152-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 595.902487] env[69328]: DEBUG oslo_concurrency.lockutils [req-347b2e19-c3c7-485d-ba38-420ac890ffca req-69b3f391-abde-431e-94d0-391bef014b6c service nova] Lock "49a668a7-5967-46a9-823f-7f613d34d152-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 595.902671] env[69328]: DEBUG nova.compute.manager [req-347b2e19-c3c7-485d-ba38-420ac890ffca req-69b3f391-abde-431e-94d0-391bef014b6c service nova] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] No waiting events found dispatching network-vif-plugged-6cff9499-a9e2-4bf0-8d52-582ddcfd6392 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 595.902814] env[69328]: WARNING nova.compute.manager [req-347b2e19-c3c7-485d-ba38-420ac890ffca req-69b3f391-abde-431e-94d0-391bef014b6c service nova] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Received unexpected event network-vif-plugged-6cff9499-a9e2-4bf0-8d52-582ddcfd6392 for instance with vm_state building and task_state spawning. [ 595.903072] env[69328]: DEBUG nova.compute.manager [req-347b2e19-c3c7-485d-ba38-420ac890ffca req-69b3f391-abde-431e-94d0-391bef014b6c service nova] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Received event network-changed-6cff9499-a9e2-4bf0-8d52-582ddcfd6392 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 595.903190] env[69328]: DEBUG nova.compute.manager [req-347b2e19-c3c7-485d-ba38-420ac890ffca req-69b3f391-abde-431e-94d0-391bef014b6c service nova] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Refreshing instance network info cache due to event network-changed-6cff9499-a9e2-4bf0-8d52-582ddcfd6392. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 595.903381] env[69328]: DEBUG oslo_concurrency.lockutils [req-347b2e19-c3c7-485d-ba38-420ac890ffca req-69b3f391-abde-431e-94d0-391bef014b6c service nova] Acquiring lock "refresh_cache-49a668a7-5967-46a9-823f-7f613d34d152" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 595.903513] env[69328]: DEBUG oslo_concurrency.lockutils [req-347b2e19-c3c7-485d-ba38-420ac890ffca req-69b3f391-abde-431e-94d0-391bef014b6c service nova] Acquired lock "refresh_cache-49a668a7-5967-46a9-823f-7f613d34d152" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 595.903684] env[69328]: DEBUG nova.network.neutron [req-347b2e19-c3c7-485d-ba38-420ac890ffca req-69b3f391-abde-431e-94d0-391bef014b6c service nova] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Refreshing network info cache for port 6cff9499-a9e2-4bf0-8d52-582ddcfd6392 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 595.913658] env[69328]: DEBUG nova.compute.utils [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 595.918435] env[69328]: DEBUG nova.compute.manager [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 595.918435] env[69328]: DEBUG nova.network.neutron [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 596.001419] env[69328]: DEBUG oslo_vmware.api [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52037838-2ae7-db42-34c9-c02e16df7dcd, 'name': SearchDatastore_Task, 'duration_secs': 0.017605} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.002465] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 596.002879] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 88f9f0c2-0c55-45bf-a494-8f1ee4922443/88f9f0c2-0c55-45bf-a494-8f1ee4922443.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 596.003342] env[69328]: DEBUG oslo_concurrency.lockutils [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 596.003831] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 596.004317] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-234b5538-d253-44ed-9d29-69b1d2c67d9c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.006911] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-69e63ff4-a42e-44b1-92c2-0e45d61fa0ef {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.017146] env[69328]: DEBUG oslo_vmware.api [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Waiting for the task: (returnval){ [ 596.017146] env[69328]: value = "task-3272650" [ 596.017146] env[69328]: _type = "Task" [ 596.017146] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.019633] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 596.020577] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 596.025036] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-331b4b03-f674-47f7-96e4-648a124b4bea {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.029411] env[69328]: DEBUG nova.policy [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a58627ef79fd46f09e894c28ce701fc7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2f5fc8b18f7d496aabcb51075fc4a94b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 596.037467] env[69328]: DEBUG oslo_vmware.api [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Task: {'id': task-3272650, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.039352] env[69328]: DEBUG oslo_vmware.api [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Waiting for the task: (returnval){ [ 596.039352] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52838ce9-235f-e14b-d477-517b2aa57f2d" [ 596.039352] env[69328]: _type = "Task" [ 596.039352] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.049050] env[69328]: DEBUG oslo_vmware.api [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52838ce9-235f-e14b-d477-517b2aa57f2d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.247342] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272649, 'name': CreateVM_Task, 'duration_secs': 0.389581} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.247635] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 596.248559] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.248559] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 596.248863] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 596.249127] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59ecc7ab-1d7d-4b68-bb1e-5a18325cccf2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.257365] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for the task: (returnval){ [ 596.257365] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5248ac84-6bb9-3530-ea81-621cb4cdb678" [ 596.257365] env[69328]: _type = "Task" [ 596.257365] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.266544] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5248ac84-6bb9-3530-ea81-621cb4cdb678, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.289073] env[69328]: DEBUG nova.network.neutron [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Successfully updated port: d79088e2-4f2b-49c3-bb85-8d8c7c108a8e {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 596.422455] env[69328]: DEBUG nova.compute.manager [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Start building block device mappings for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 596.532271] env[69328]: DEBUG oslo_vmware.api [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Task: {'id': task-3272650, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.554966] env[69328]: DEBUG oslo_vmware.api [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52838ce9-235f-e14b-d477-517b2aa57f2d, 'name': SearchDatastore_Task, 'duration_secs': 0.025665} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.563187] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12d76768-d76a-42d3-aff2-5cef1f367fb1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.570283] env[69328]: DEBUG oslo_vmware.api [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Waiting for the task: (returnval){ [ 596.570283] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]521802f0-fb40-ec67-eaa2-ce68230baa34" [ 596.570283] env[69328]: _type = "Task" [ 596.570283] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.582150] env[69328]: DEBUG oslo_vmware.api [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]521802f0-fb40-ec67-eaa2-ce68230baa34, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.718700] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53970ff8-cfa7-4729-847f-8fa2546795af {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.731241] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc265cd3-1048-4361-b833-dadfdf9a219f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.767634] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44faf17d-5e12-4fff-8549-5a0d17dc5711 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.775566] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5248ac84-6bb9-3530-ea81-621cb4cdb678, 'name': SearchDatastore_Task, 'duration_secs': 0.010709} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.778529] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 596.778678] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 596.778866] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.780220] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cef60d6-aed4-4cba-a22a-74de2bb41c44 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.795708] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquiring lock "refresh_cache-50b84adc-5ff3-4a1e-a09f-5c96daef9b87" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.795860] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquired lock "refresh_cache-50b84adc-5ff3-4a1e-a09f-5c96daef9b87" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 596.796171] env[69328]: DEBUG nova.network.neutron [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 596.797591] env[69328]: DEBUG nova.compute.provider_tree [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 596.800547] env[69328]: DEBUG nova.network.neutron [req-347b2e19-c3c7-485d-ba38-420ac890ffca req-69b3f391-abde-431e-94d0-391bef014b6c service nova] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Updated VIF entry in instance network info cache for port 6cff9499-a9e2-4bf0-8d52-582ddcfd6392. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 596.800889] env[69328]: DEBUG nova.network.neutron [req-347b2e19-c3c7-485d-ba38-420ac890ffca req-69b3f391-abde-431e-94d0-391bef014b6c service nova] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Updating instance_info_cache with network_info: [{"id": "6cff9499-a9e2-4bf0-8d52-582ddcfd6392", "address": "fa:16:3e:18:70:7c", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.90", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6cff9499-a9", "ovs_interfaceid": "6cff9499-a9e2-4bf0-8d52-582ddcfd6392", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 596.805758] env[69328]: DEBUG nova.compute.manager [req-57eec2e0-5ca7-44bd-b594-ff3fb9ba296b req-2aa48a24-7ec6-483a-8e7d-61ac194e700f service nova] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Received event network-vif-plugged-d79088e2-4f2b-49c3-bb85-8d8c7c108a8e {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 596.805758] env[69328]: DEBUG oslo_concurrency.lockutils [req-57eec2e0-5ca7-44bd-b594-ff3fb9ba296b req-2aa48a24-7ec6-483a-8e7d-61ac194e700f service nova] Acquiring lock "50b84adc-5ff3-4a1e-a09f-5c96daef9b87-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 596.805758] env[69328]: DEBUG oslo_concurrency.lockutils [req-57eec2e0-5ca7-44bd-b594-ff3fb9ba296b req-2aa48a24-7ec6-483a-8e7d-61ac194e700f service nova] Lock "50b84adc-5ff3-4a1e-a09f-5c96daef9b87-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 596.805911] env[69328]: DEBUG oslo_concurrency.lockutils [req-57eec2e0-5ca7-44bd-b594-ff3fb9ba296b req-2aa48a24-7ec6-483a-8e7d-61ac194e700f service nova] Lock "50b84adc-5ff3-4a1e-a09f-5c96daef9b87-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 596.805947] env[69328]: DEBUG nova.compute.manager [req-57eec2e0-5ca7-44bd-b594-ff3fb9ba296b req-2aa48a24-7ec6-483a-8e7d-61ac194e700f service nova] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] No waiting events found dispatching network-vif-plugged-d79088e2-4f2b-49c3-bb85-8d8c7c108a8e {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 596.806103] env[69328]: WARNING 
nova.compute.manager [req-57eec2e0-5ca7-44bd-b594-ff3fb9ba296b req-2aa48a24-7ec6-483a-8e7d-61ac194e700f service nova] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Received unexpected event network-vif-plugged-d79088e2-4f2b-49c3-bb85-8d8c7c108a8e for instance with vm_state building and task_state spawning. [ 596.989227] env[69328]: DEBUG nova.network.neutron [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Successfully created port: 03adda47-e195-413d-85d7-5fd0c5a5027b {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 597.035776] env[69328]: DEBUG oslo_vmware.api [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Task: {'id': task-3272650, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.608902} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.036083] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 88f9f0c2-0c55-45bf-a494-8f1ee4922443/88f9f0c2-0c55-45bf-a494-8f1ee4922443.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 597.036269] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 597.036692] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5811b70c-a02c-483f-b234-96e69bc6d7cf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.044503] env[69328]: DEBUG oslo_vmware.api [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Waiting for the task: (returnval){ [ 597.044503] env[69328]: value = "task-3272651" [ 597.044503] env[69328]: _type = "Task" [ 597.044503] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.054496] env[69328]: DEBUG oslo_vmware.api [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Task: {'id': task-3272651, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.082717] env[69328]: DEBUG oslo_vmware.api [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]521802f0-fb40-ec67-eaa2-ce68230baa34, 'name': SearchDatastore_Task, 'duration_secs': 0.035309} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.082989] env[69328]: DEBUG oslo_concurrency.lockutils [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 597.083272] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 676173ee-8001-48c6-bd28-09130f6dd99a/676173ee-8001-48c6-bd28-09130f6dd99a.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 597.083574] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 597.083763] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 597.083961] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ddbf1b8b-3736-4a1c-b488-461e630a72e7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.086500] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-508b1703-58fe-4037-b919-5d1e49f523ec {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.092747] env[69328]: DEBUG oslo_vmware.api [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Waiting for the task: (returnval){ [ 597.092747] env[69328]: value = "task-3272652" [ 597.092747] env[69328]: _type = "Task" [ 597.092747] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.096979] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 597.097249] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 597.098237] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00ea7d2c-b158-4c31-bbe9-cadbab423d16 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.103913] env[69328]: DEBUG oslo_vmware.api [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': task-3272652, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.107073] env[69328]: DEBUG oslo_vmware.api [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Waiting for the task: (returnval){ [ 597.107073] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d045e5-da2a-3de9-e19c-ad16f56cf8d0" [ 597.107073] env[69328]: _type = "Task" [ 597.107073] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.116635] env[69328]: DEBUG oslo_vmware.api [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d045e5-da2a-3de9-e19c-ad16f56cf8d0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.308719] env[69328]: DEBUG nova.scheduler.client.report [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 597.316373] env[69328]: DEBUG oslo_concurrency.lockutils [req-347b2e19-c3c7-485d-ba38-420ac890ffca req-69b3f391-abde-431e-94d0-391bef014b6c service nova] Releasing lock "refresh_cache-49a668a7-5967-46a9-823f-7f613d34d152" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 597.362357] env[69328]: DEBUG nova.network.neutron [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 597.440234] env[69328]: DEBUG nova.compute.manager [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 597.473077] env[69328]: DEBUG nova.virt.hardware [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 597.473825] env[69328]: DEBUG nova.virt.hardware [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 597.473918] env[69328]: DEBUG nova.virt.hardware [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 597.474205] env[69328]: DEBUG nova.virt.hardware [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 597.474399] env[69328]: DEBUG nova.virt.hardware [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 597.474598] env[69328]: DEBUG nova.virt.hardware [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 597.474774] env[69328]: DEBUG nova.virt.hardware [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 597.475009] env[69328]: DEBUG nova.virt.hardware [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 597.475315] env[69328]: DEBUG nova.virt.hardware [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 597.475798] env[69328]: DEBUG nova.virt.hardware [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 597.475916] env[69328]: DEBUG nova.virt.hardware [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 597.477190] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68028a15-acd1-47b4-8c21-07dae92d958f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.490549] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33b3affe-1f9a-4402-ae9d-a2cc678ccd88 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.556353] env[69328]: DEBUG oslo_vmware.api [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Task: {'id': task-3272651, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067748} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.556353] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 597.556353] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16d707e2-649c-42a8-be88-f15d14c95d26 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.587745] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Reconfiguring VM instance instance-00000001 to attach disk [datastore2] 88f9f0c2-0c55-45bf-a494-8f1ee4922443/88f9f0c2-0c55-45bf-a494-8f1ee4922443.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 597.590703] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6676d28d-a8a9-4ad9-931b-158a6059f9a8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.618109] env[69328]: DEBUG oslo_vmware.api [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Waiting for the task: (returnval){ [ 597.618109] env[69328]: value = "task-3272653" [ 597.618109] env[69328]: _type = "Task" [ 597.618109] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.622371] env[69328]: DEBUG oslo_vmware.api [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': task-3272652, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.515367} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.627220] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 676173ee-8001-48c6-bd28-09130f6dd99a/676173ee-8001-48c6-bd28-09130f6dd99a.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 597.627220] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 597.627833] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9bdfcc6f-0302-4827-827b-376cc4b82b5d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.635420] env[69328]: DEBUG oslo_vmware.api [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d045e5-da2a-3de9-e19c-ad16f56cf8d0, 'name': SearchDatastore_Task, 'duration_secs': 0.019781} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.636519] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83bf3f52-e85b-4faf-b3ed-9b30ca7c0f83 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.647204] env[69328]: DEBUG oslo_vmware.api [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Waiting for the task: (returnval){ [ 597.647204] env[69328]: value = "task-3272654" [ 597.647204] env[69328]: _type = "Task" [ 597.647204] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.650273] env[69328]: DEBUG oslo_vmware.api [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Task: {'id': task-3272653, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.660390] env[69328]: DEBUG oslo_vmware.api [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Waiting for the task: (returnval){ [ 597.660390] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]521d854f-96d0-758e-8664-86055d9bc156" [ 597.660390] env[69328]: _type = "Task" [ 597.660390] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.667229] env[69328]: DEBUG oslo_vmware.api [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': task-3272654, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.672711] env[69328]: DEBUG oslo_vmware.api [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]521d854f-96d0-758e-8664-86055d9bc156, 'name': SearchDatastore_Task, 'duration_secs': 0.010193} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.672948] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 597.673645] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 49a668a7-5967-46a9-823f-7f613d34d152/49a668a7-5967-46a9-823f-7f613d34d152.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 597.673645] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 597.673793] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 597.673892] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-180ca7be-8c54-49e6-bd32-7d2d41a71c97 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.676076] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-58d25699-1dec-4827-879c-6eda325197ac {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.682262] env[69328]: DEBUG oslo_vmware.api [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Waiting for the task: (returnval){ [ 597.682262] env[69328]: value = "task-3272655" [ 597.682262] env[69328]: 
_type = "Task" [ 597.682262] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.686033] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 597.686271] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 597.687296] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9268fec-303b-4598-98ff-90c892732969 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.693601] env[69328]: DEBUG oslo_vmware.api [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Task: {'id': task-3272655, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.694918] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for the task: (returnval){ [ 597.694918] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c6b74a-9df2-b4b3-ede8-ed19a1eb1e04" [ 597.694918] env[69328]: _type = "Task" [ 597.694918] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.702140] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c6b74a-9df2-b4b3-ede8-ed19a1eb1e04, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.814816] env[69328]: DEBUG nova.compute.manager [req-172ad4fd-00d5-425d-8998-3181c0258aa3 req-d1fd5d47-9e99-4a9f-a1e4-d5d0d454d225 service nova] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Received event network-changed-f2be515c-61cb-4257-b9e3-858bf3798d6d {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 597.815290] env[69328]: DEBUG nova.compute.manager [req-172ad4fd-00d5-425d-8998-3181c0258aa3 req-d1fd5d47-9e99-4a9f-a1e4-d5d0d454d225 service nova] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Refreshing instance network info cache due to event network-changed-f2be515c-61cb-4257-b9e3-858bf3798d6d. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 597.815573] env[69328]: DEBUG oslo_concurrency.lockutils [req-172ad4fd-00d5-425d-8998-3181c0258aa3 req-d1fd5d47-9e99-4a9f-a1e4-d5d0d454d225 service nova] Acquiring lock "refresh_cache-230c6278-65af-4f5d-b817-0b695086c29d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 597.815630] env[69328]: DEBUG oslo_concurrency.lockutils [req-172ad4fd-00d5-425d-8998-3181c0258aa3 req-d1fd5d47-9e99-4a9f-a1e4-d5d0d454d225 service nova] Acquired lock "refresh_cache-230c6278-65af-4f5d-b817-0b695086c29d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 597.815799] env[69328]: DEBUG nova.network.neutron [req-172ad4fd-00d5-425d-8998-3181c0258aa3 req-d1fd5d47-9e99-4a9f-a1e4-d5d0d454d225 service nova] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Refreshing network info cache for port f2be515c-61cb-4257-b9e3-858bf3798d6d {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 597.821150] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.416s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 597.821841] env[69328]: DEBUG nova.compute.manager [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 597.824258] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.958s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 597.826031] env[69328]: INFO nova.compute.claims [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 597.851185] env[69328]: DEBUG nova.network.neutron [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Updating instance_info_cache with network_info: [{"id": "d79088e2-4f2b-49c3-bb85-8d8c7c108a8e", "address": "fa:16:3e:25:84:5e", "network": {"id": "a4231ba3-2b54-4dd9-82bc-772b0823748c", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-322499105-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "435a67cec87842678e6c1c354ab09bd7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "195e328b-e41a-49f5-9e51-546b8ea8ceba", "external-id": "nsx-vlan-transportzone-735", "segmentation_id": 735, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd79088e2-4f", "ovs_interfaceid": "d79088e2-4f2b-49c3-bb85-8d8c7c108a8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.137625] env[69328]: DEBUG oslo_vmware.api [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Task: {'id': task-3272653, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.164548] env[69328]: DEBUG oslo_vmware.api [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': task-3272654, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077509} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.164899] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 598.165811] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86ce401f-6a2b-4d21-a3f2-524d992dbcd8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.206548] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Reconfiguring VM instance instance-00000002 to attach disk [datastore2] 676173ee-8001-48c6-bd28-09130f6dd99a/676173ee-8001-48c6-bd28-09130f6dd99a.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 598.211240] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-82e2d25d-1d7d-4e28-8152-3e3a1f00b792 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.238273] env[69328]: DEBUG oslo_vmware.api [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Task: {'id': task-3272655, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.245499] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c6b74a-9df2-b4b3-ede8-ed19a1eb1e04, 'name': SearchDatastore_Task, 'duration_secs': 0.011367} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.245499] env[69328]: DEBUG oslo_vmware.api [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Waiting for the task: (returnval){ [ 598.245499] env[69328]: value = "task-3272656" [ 598.245499] env[69328]: _type = "Task" [ 598.245499] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.246105] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ae4996d-ddb6-4bad-aabb-a57fd030daba {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.261243] env[69328]: DEBUG oslo_vmware.api [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': task-3272656, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.262186] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for the task: (returnval){ [ 598.262186] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ea215d-8914-b910-0e0b-05a6e932044c" [ 598.262186] env[69328]: _type = "Task" [ 598.262186] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.272083] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ea215d-8914-b910-0e0b-05a6e932044c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.333400] env[69328]: DEBUG nova.compute.utils [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 598.336380] env[69328]: DEBUG nova.compute.manager [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 598.336567] env[69328]: DEBUG nova.network.neutron [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 598.354131] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Releasing lock "refresh_cache-50b84adc-5ff3-4a1e-a09f-5c96daef9b87" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 598.357547] env[69328]: DEBUG nova.compute.manager [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Instance network_info: |[{"id": "d79088e2-4f2b-49c3-bb85-8d8c7c108a8e", "address": "fa:16:3e:25:84:5e", "network": {"id": "a4231ba3-2b54-4dd9-82bc-772b0823748c", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-322499105-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "435a67cec87842678e6c1c354ab09bd7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"195e328b-e41a-49f5-9e51-546b8ea8ceba", "external-id": "nsx-vlan-transportzone-735", "segmentation_id": 735, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd79088e2-4f", "ovs_interfaceid": "d79088e2-4f2b-49c3-bb85-8d8c7c108a8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 598.358210] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:25:84:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '195e328b-e41a-49f5-9e51-546b8ea8ceba', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd79088e2-4f2b-49c3-bb85-8d8c7c108a8e', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 598.368071] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 598.368677] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 598.368905] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5146a93d-f765-43b7-af84-93e1e933a61e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.390544] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 598.390544] env[69328]: value = "task-3272657" [ 598.390544] env[69328]: _type = "Task" [ 598.390544] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.401389] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272657, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.549787] env[69328]: DEBUG nova.policy [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '91ee0807be574796bec53919ecd5a934', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f2aed2695f2d437fbe9202124d2ed95b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 598.633949] env[69328]: DEBUG oslo_vmware.api [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Task: {'id': task-3272653, 'name': ReconfigVM_Task, 'duration_secs': 0.616635} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.634245] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Reconfigured VM instance instance-00000001 to attach disk [datastore2] 88f9f0c2-0c55-45bf-a494-8f1ee4922443/88f9f0c2-0c55-45bf-a494-8f1ee4922443.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 598.634940] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3c025ae3-2886-4a73-96b2-b2ca7f7f6b53 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.641820] env[69328]: DEBUG oslo_vmware.api [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Waiting for the task: (returnval){ [ 598.641820] env[69328]: value = "task-3272658" [ 598.641820] env[69328]: _type = "Task" [ 598.641820] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.652503] env[69328]: DEBUG oslo_vmware.api [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Task: {'id': task-3272658, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.707018] env[69328]: DEBUG oslo_vmware.api [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Task: {'id': task-3272655, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.710474} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.707018] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 49a668a7-5967-46a9-823f-7f613d34d152/49a668a7-5967-46a9-823f-7f613d34d152.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 598.707018] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 598.707018] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1d08378a-1f2d-4a64-b326-a439497ef634 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.713528] env[69328]: DEBUG oslo_vmware.api [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Waiting for the task: (returnval){ [ 598.713528] env[69328]: value = "task-3272659" [ 598.713528] env[69328]: _type = "Task" [ 598.713528] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.724961] env[69328]: DEBUG oslo_vmware.api [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Task: {'id': task-3272659, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.757688] env[69328]: DEBUG oslo_vmware.api [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': task-3272656, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.771857] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ea215d-8914-b910-0e0b-05a6e932044c, 'name': SearchDatastore_Task, 'duration_secs': 0.069539} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.772099] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 598.772363] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 230c6278-65af-4f5d-b817-0b695086c29d/230c6278-65af-4f5d-b817-0b695086c29d.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 598.772691] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-214b4d90-2819-4782-a924-8fba3a1a090e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.779368] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for the task: (returnval){ [ 598.779368] env[69328]: value = "task-3272660" [ 598.779368] env[69328]: _type = "Task" [ 598.779368] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.788481] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3272660, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.837885] env[69328]: DEBUG nova.compute.manager [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 598.909332] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272657, 'name': CreateVM_Task, 'duration_secs': 0.412517} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.909936] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 598.911185] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.911185] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 598.912897] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 598.912897] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff98cfe4-4234-4e14-9694-858ea2ac3425 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.918314] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for the task: (returnval){ [ 598.918314] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52543908-50f1-a6e0-3083-7d24cfdbed19" [ 598.918314] env[69328]: _type = "Task" [ 598.918314] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.928232] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52543908-50f1-a6e0-3083-7d24cfdbed19, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.086228] env[69328]: DEBUG nova.network.neutron [req-172ad4fd-00d5-425d-8998-3181c0258aa3 req-d1fd5d47-9e99-4a9f-a1e4-d5d0d454d225 service nova] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Updated VIF entry in instance network info cache for port f2be515c-61cb-4257-b9e3-858bf3798d6d. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 599.086485] env[69328]: DEBUG nova.network.neutron [req-172ad4fd-00d5-425d-8998-3181c0258aa3 req-d1fd5d47-9e99-4a9f-a1e4-d5d0d454d225 service nova] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Updating instance_info_cache with network_info: [{"id": "f2be515c-61cb-4257-b9e3-858bf3798d6d", "address": "fa:16:3e:2f:c7:3f", "network": {"id": "a4231ba3-2b54-4dd9-82bc-772b0823748c", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-322499105-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "435a67cec87842678e6c1c354ab09bd7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "195e328b-e41a-49f5-9e51-546b8ea8ceba", "external-id": "nsx-vlan-transportzone-735", "segmentation_id": 735, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2be515c-61", "ovs_interfaceid": "f2be515c-61cb-4257-b9e3-858bf3798d6d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 599.100912] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-814bda3d-8e44-48e5-8a9f-baf51e83041b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.112792] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2651d4cc-0116-49b5-a99d-fb72d2710733 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.155782] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fbed052-5ca6-4063-b744-b8e2333cf5f2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.163612] env[69328]: DEBUG oslo_vmware.api [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Task: {'id': task-3272658, 'name': Rename_Task, 'duration_secs': 0.215081} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.165536] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 599.165819] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3eb83f04-f4ff-47c8-aaba-0e1278c01fe2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.168326] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-358a9cb2-fe6e-4462-9b31-15367dbc1c34 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.184376] env[69328]: DEBUG nova.compute.provider_tree [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 599.186512] env[69328]: DEBUG oslo_vmware.api [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Waiting for the task: (returnval){ [ 599.186512] env[69328]: value = "task-3272661" [ 599.186512] env[69328]: _type = "Task" [ 599.186512] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.224527] env[69328]: DEBUG oslo_vmware.api [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Task: {'id': task-3272659, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086336} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.224728] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 599.225737] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a32f6467-437a-4d0f-b240-b6d388706c00 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.251124] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Reconfiguring VM instance instance-00000003 to attach disk [datastore2] 49a668a7-5967-46a9-823f-7f613d34d152/49a668a7-5967-46a9-823f-7f613d34d152.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 599.251534] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c8db44f8-65c4-4561-b9df-c9ebc6faec34 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.276751] env[69328]: DEBUG oslo_vmware.api [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': task-3272656, 'name': ReconfigVM_Task, 'duration_secs': 0.558962} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.278281] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Reconfigured VM instance instance-00000002 to attach disk [datastore2] 676173ee-8001-48c6-bd28-09130f6dd99a/676173ee-8001-48c6-bd28-09130f6dd99a.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 599.278974] env[69328]: DEBUG oslo_vmware.api [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Waiting for the task: (returnval){ [ 599.278974] env[69328]: value = "task-3272662" [ 599.278974] env[69328]: _type = "Task" [ 599.278974] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.279162] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-62bc15b6-fd4e-4fd7-9441-7c224cb562c8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.294019] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3272660, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.296575] env[69328]: DEBUG oslo_vmware.api [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Waiting for the task: (returnval){ [ 599.296575] env[69328]: value = "task-3272663" [ 599.296575] env[69328]: _type = "Task" [ 599.296575] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.303947] env[69328]: DEBUG oslo_vmware.api [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': task-3272663, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.358909] env[69328]: DEBUG nova.network.neutron [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Successfully created port: cbf38f9d-1507-45bb-9684-bf804c86b93b {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 599.415720] env[69328]: DEBUG nova.network.neutron [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Successfully updated port: 03adda47-e195-413d-85d7-5fd0c5a5027b {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 599.434916] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52543908-50f1-a6e0-3083-7d24cfdbed19, 'name': SearchDatastore_Task, 'duration_secs': 0.013497} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.435241] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 599.435482] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 599.435691] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 599.437109] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 599.437109] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 599.437109] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c241b2dc-eb41-4753-aefb-0ef2475ddb51 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.447651] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 599.447799] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 599.448670] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1470c9c8-2c34-407a-8d27-06dd3e2322e4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.455041] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for the task: (returnval){ [ 599.455041] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52fe7b16-5a15-2132-ce51-0b86e678c624" [ 599.455041] env[69328]: _type = "Task" [ 599.455041] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.464184] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52fe7b16-5a15-2132-ce51-0b86e678c624, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.591170] env[69328]: DEBUG oslo_concurrency.lockutils [req-172ad4fd-00d5-425d-8998-3181c0258aa3 req-d1fd5d47-9e99-4a9f-a1e4-d5d0d454d225 service nova] Releasing lock "refresh_cache-230c6278-65af-4f5d-b817-0b695086c29d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 599.690525] env[69328]: DEBUG nova.scheduler.client.report [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 599.705112] env[69328]: DEBUG oslo_vmware.api [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Task: {'id': task-3272661, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.793845] env[69328]: DEBUG oslo_vmware.api [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Task: {'id': task-3272662, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.796825] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3272660, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.804303] env[69328]: DEBUG oslo_vmware.api [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': task-3272663, 'name': Rename_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.852344] env[69328]: DEBUG nova.compute.manager [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 599.876268] env[69328]: DEBUG nova.virt.hardware [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 599.876268] env[69328]: DEBUG nova.virt.hardware [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 599.876452] env[69328]: DEBUG nova.virt.hardware [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 599.876571] env[69328]: DEBUG nova.virt.hardware [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 599.876641] env[69328]: DEBUG nova.virt.hardware [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 599.876760] env[69328]: DEBUG nova.virt.hardware [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 599.876967] env[69328]: DEBUG nova.virt.hardware [None 
req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 599.877162] env[69328]: DEBUG nova.virt.hardware [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 599.877429] env[69328]: DEBUG nova.virt.hardware [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 599.877488] env[69328]: DEBUG nova.virt.hardware [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 599.877714] env[69328]: DEBUG nova.virt.hardware [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 599.878508] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb11bd30-2713-40da-865b-5823a25e0aa9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.886779] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ecd07c8-f44a-492b-a4da-bf60fdd9b660 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.918965] env[69328]: DEBUG oslo_concurrency.lockutils [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Acquiring lock "refresh_cache-caba3b5c-db15-4de6-8d3d-41f6751f1b83" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 599.919182] env[69328]: DEBUG oslo_concurrency.lockutils [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Acquired lock "refresh_cache-caba3b5c-db15-4de6-8d3d-41f6751f1b83" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 599.919363] env[69328]: DEBUG nova.network.neutron [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 599.968627] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 
tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52fe7b16-5a15-2132-ce51-0b86e678c624, 'name': SearchDatastore_Task, 'duration_secs': 0.009286} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.969467] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05569504-4add-474e-adda-97591448615e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.978483] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for the task: (returnval){ [ 599.978483] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d5197b-a051-476b-8569-2ae96a8bc170" [ 599.978483] env[69328]: _type = "Task" [ 599.978483] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.990425] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d5197b-a051-476b-8569-2ae96a8bc170, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.202012] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.376s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 600.202934] env[69328]: DEBUG nova.compute.manager [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 600.206758] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.178s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 600.209978] env[69328]: INFO nova.compute.claims [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 600.220205] env[69328]: DEBUG oslo_vmware.api [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Task: {'id': task-3272661, 'name': PowerOnVM_Task} progress is 71%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.297646] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3272660, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.305278] env[69328]: DEBUG oslo_vmware.api [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Task: {'id': task-3272662, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.308933] env[69328]: DEBUG oslo_vmware.api [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': task-3272663, 'name': Rename_Task, 'duration_secs': 0.720287} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.309186] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 600.309466] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8e3c0a40-a9d9-4347-902e-6600ed5d02fc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.316200] env[69328]: DEBUG oslo_vmware.api [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Waiting for the task: (returnval){ [ 600.316200] env[69328]: value = "task-3272664" [ 600.316200] env[69328]: _type = "Task" [ 600.316200] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.325581] env[69328]: DEBUG oslo_vmware.api [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': task-3272664, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.475354] env[69328]: DEBUG nova.network.neutron [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 600.492154] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d5197b-a051-476b-8569-2ae96a8bc170, 'name': SearchDatastore_Task, 'duration_secs': 0.057053} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.492837] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 600.493311] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 50b84adc-5ff3-4a1e-a09f-5c96daef9b87/50b84adc-5ff3-4a1e-a09f-5c96daef9b87.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 600.493959] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0cf75e85-0d36-4783-a602-e15ce89b6e03 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.509320] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for the task: (returnval){ [ 600.509320] env[69328]: value = "task-3272665" [ 600.509320] env[69328]: _type = "Task" [ 600.509320] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.516345] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3272665, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.629040] env[69328]: DEBUG nova.compute.manager [req-f653844c-3e78-4a7e-86c9-979760fa7c8a req-095674d4-a7b7-4028-9fc0-793474d21918 service nova] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Received event network-changed-d79088e2-4f2b-49c3-bb85-8d8c7c108a8e {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 600.629404] env[69328]: DEBUG nova.compute.manager [req-f653844c-3e78-4a7e-86c9-979760fa7c8a req-095674d4-a7b7-4028-9fc0-793474d21918 service nova] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Refreshing instance network info cache due to event network-changed-d79088e2-4f2b-49c3-bb85-8d8c7c108a8e. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 600.630230] env[69328]: DEBUG oslo_concurrency.lockutils [req-f653844c-3e78-4a7e-86c9-979760fa7c8a req-095674d4-a7b7-4028-9fc0-793474d21918 service nova] Acquiring lock "refresh_cache-50b84adc-5ff3-4a1e-a09f-5c96daef9b87" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.630453] env[69328]: DEBUG oslo_concurrency.lockutils [req-f653844c-3e78-4a7e-86c9-979760fa7c8a req-095674d4-a7b7-4028-9fc0-793474d21918 service nova] Acquired lock "refresh_cache-50b84adc-5ff3-4a1e-a09f-5c96daef9b87" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 600.630708] env[69328]: DEBUG nova.network.neutron [req-f653844c-3e78-4a7e-86c9-979760fa7c8a req-095674d4-a7b7-4028-9fc0-793474d21918 service nova] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Refreshing network info cache for port d79088e2-4f2b-49c3-bb85-8d8c7c108a8e {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 600.710779] env[69328]: DEBUG oslo_vmware.api [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Task: {'id': task-3272661, 'name': PowerOnVM_Task, 'duration_secs': 1.364906} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.712900] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 600.712900] env[69328]: INFO nova.compute.manager [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Took 14.93 seconds to spawn the instance on the hypervisor. [ 600.712900] env[69328]: DEBUG nova.compute.manager [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 600.712900] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5089930b-4373-4bc4-8e80-fd04f20749cd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.719539] env[69328]: DEBUG nova.compute.utils [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 600.721866] env[69328]: DEBUG nova.compute.manager [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 600.722077] env[69328]: DEBUG nova.network.neutron [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 600.738741] env[69328]: DEBUG nova.network.neutron [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Updating instance_info_cache with network_info: [{"id": "03adda47-e195-413d-85d7-5fd0c5a5027b", "address": "fa:16:3e:c6:a1:68", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.109", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03adda47-e1", "ovs_interfaceid": "03adda47-e195-413d-85d7-5fd0c5a5027b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 600.804628] env[69328]: DEBUG oslo_vmware.api [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Task: {'id': task-3272662, 'name': ReconfigVM_Task, 'duration_secs': 1.157582} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.809548] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Reconfigured VM instance instance-00000003 to attach disk [datastore2] 49a668a7-5967-46a9-823f-7f613d34d152/49a668a7-5967-46a9-823f-7f613d34d152.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 600.811730] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3272660, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.526501} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.811730] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5a43a482-d1e3-4f2c-923b-6666f62258e7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.813341] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 230c6278-65af-4f5d-b817-0b695086c29d/230c6278-65af-4f5d-b817-0b695086c29d.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 600.813869] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 600.814237] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-16131d6a-c73c-49e8-bc54-77fc715e5e25 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.824700] env[69328]: DEBUG oslo_vmware.api [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Waiting for the task: (returnval){ [ 600.824700] env[69328]: value = "task-3272666" [ 600.824700] env[69328]: _type = "Task" [ 600.824700] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.836308] env[69328]: DEBUG oslo_vmware.api [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': task-3272664, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.836765] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for the task: (returnval){ [ 600.836765] env[69328]: value = "task-3272667" [ 600.836765] env[69328]: _type = "Task" [ 600.836765] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.846177] env[69328]: DEBUG oslo_vmware.api [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Task: {'id': task-3272666, 'name': Rename_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.850298] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3272667, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.861265] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Acquiring lock "edb1a21a-6907-4198-a977-c1213e8fecc0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 600.861265] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Lock "edb1a21a-6907-4198-a977-c1213e8fecc0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 600.886460] env[69328]: DEBUG nova.policy [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9f9460824e6743d7871597dc1680323e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '066e4edc64b44104b45e81bdb34e4d5d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 601.023529] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3272665, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.227569] env[69328]: DEBUG nova.compute.manager [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Start building block device mappings for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 601.245389] env[69328]: DEBUG oslo_concurrency.lockutils [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Releasing lock "refresh_cache-caba3b5c-db15-4de6-8d3d-41f6751f1b83" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 601.245749] env[69328]: DEBUG nova.compute.manager [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Instance network_info: |[{"id": "03adda47-e195-413d-85d7-5fd0c5a5027b", "address": "fa:16:3e:c6:a1:68", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.109", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03adda47-e1", "ovs_interfaceid": "03adda47-e195-413d-85d7-5fd0c5a5027b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 601.248037] env[69328]: INFO nova.compute.manager [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Took 19.76 seconds to build instance. [ 601.248037] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c6:a1:68', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a8b99a46-3e7f-4ef1-9e45-58e6cd17f210', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '03adda47-e195-413d-85d7-5fd0c5a5027b', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 601.259175] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Creating folder: Project (2f5fc8b18f7d496aabcb51075fc4a94b). Parent ref: group-v653649. 
{{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 601.260090] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-97e690ed-e1ba-4a99-844a-1fa527e83573 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.274518] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Created folder: Project (2f5fc8b18f7d496aabcb51075fc4a94b) in parent group-v653649. [ 601.274722] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Creating folder: Instances. Parent ref: group-v653663. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 601.275856] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b4c696a8-445f-488a-a140-38cbd830de2b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.286346] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Created folder: Instances in parent group-v653663. [ 601.287020] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 601.287220] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 601.287264] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0ed348a6-e2d3-4cfd-890f-fcba6ef1dafd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.317445] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 601.317445] env[69328]: value = "task-3272670" [ 601.317445] env[69328]: _type = "Task" [ 601.317445] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.340917] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272670, 'name': CreateVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.347695] env[69328]: DEBUG oslo_vmware.api [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': task-3272664, 'name': PowerOnVM_Task, 'duration_secs': 0.762922} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.354396] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 601.354396] env[69328]: INFO nova.compute.manager [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Took 13.26 seconds to spawn the instance on the hypervisor. [ 601.354396] env[69328]: DEBUG nova.compute.manager [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 601.354396] env[69328]: DEBUG oslo_vmware.api [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Task: {'id': task-3272666, 'name': Rename_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.355642] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f796628-65c2-453c-bdd4-c2b460013776 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.361862] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3272667, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.095485} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.362526] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 601.363352] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53c3fa05-45b8-45cb-b981-7f2db1951f6a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.390546] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Reconfiguring VM instance instance-00000004 to attach disk [datastore2] 230c6278-65af-4f5d-b817-0b695086c29d/230c6278-65af-4f5d-b817-0b695086c29d.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 601.393752] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-77c524e1-d16d-409b-a05e-95ab7f24eab4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.417821] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for the task: (returnval){ [ 601.417821] env[69328]: value = "task-3272671" [ 601.417821] env[69328]: _type = "Task" [ 601.417821] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.424948] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3272671, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.477799] env[69328]: DEBUG nova.compute.manager [req-c96ea918-ec4c-4d15-989f-1ffca9cd0ff1 req-5dc64386-2d24-4851-9b84-437a7dbf7cc5 service nova] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Received event network-vif-plugged-03adda47-e195-413d-85d7-5fd0c5a5027b {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 601.478030] env[69328]: DEBUG oslo_concurrency.lockutils [req-c96ea918-ec4c-4d15-989f-1ffca9cd0ff1 req-5dc64386-2d24-4851-9b84-437a7dbf7cc5 service nova] Acquiring lock "caba3b5c-db15-4de6-8d3d-41f6751f1b83-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 601.478981] env[69328]: DEBUG oslo_concurrency.lockutils [req-c96ea918-ec4c-4d15-989f-1ffca9cd0ff1 req-5dc64386-2d24-4851-9b84-437a7dbf7cc5 service nova] Lock "caba3b5c-db15-4de6-8d3d-41f6751f1b83-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 601.479197] env[69328]: DEBUG oslo_concurrency.lockutils [req-c96ea918-ec4c-4d15-989f-1ffca9cd0ff1 req-5dc64386-2d24-4851-9b84-437a7dbf7cc5 service nova] Lock "caba3b5c-db15-4de6-8d3d-41f6751f1b83-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 601.479374] env[69328]: DEBUG nova.compute.manager [req-c96ea918-ec4c-4d15-989f-1ffca9cd0ff1 req-5dc64386-2d24-4851-9b84-437a7dbf7cc5 service nova] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] No waiting events found dispatching network-vif-plugged-03adda47-e195-413d-85d7-5fd0c5a5027b {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 601.479546] env[69328]: WARNING nova.compute.manager [req-c96ea918-ec4c-4d15-989f-1ffca9cd0ff1 req-5dc64386-2d24-4851-9b84-437a7dbf7cc5 service nova] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Received unexpected event network-vif-plugged-03adda47-e195-413d-85d7-5fd0c5a5027b for instance with vm_state building and task_state spawning. [ 601.479908] env[69328]: DEBUG nova.compute.manager [req-c96ea918-ec4c-4d15-989f-1ffca9cd0ff1 req-5dc64386-2d24-4851-9b84-437a7dbf7cc5 service nova] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Received event network-changed-03adda47-e195-413d-85d7-5fd0c5a5027b {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 601.479908] env[69328]: DEBUG nova.compute.manager [req-c96ea918-ec4c-4d15-989f-1ffca9cd0ff1 req-5dc64386-2d24-4851-9b84-437a7dbf7cc5 service nova] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Refreshing instance network info cache due to event network-changed-03adda47-e195-413d-85d7-5fd0c5a5027b. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 601.481916] env[69328]: DEBUG oslo_concurrency.lockutils [req-c96ea918-ec4c-4d15-989f-1ffca9cd0ff1 req-5dc64386-2d24-4851-9b84-437a7dbf7cc5 service nova] Acquiring lock "refresh_cache-caba3b5c-db15-4de6-8d3d-41f6751f1b83" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 601.481916] env[69328]: DEBUG oslo_concurrency.lockutils [req-c96ea918-ec4c-4d15-989f-1ffca9cd0ff1 req-5dc64386-2d24-4851-9b84-437a7dbf7cc5 service nova] Acquired lock "refresh_cache-caba3b5c-db15-4de6-8d3d-41f6751f1b83" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 601.481916] env[69328]: DEBUG nova.network.neutron [req-c96ea918-ec4c-4d15-989f-1ffca9cd0ff1 req-5dc64386-2d24-4851-9b84-437a7dbf7cc5 service nova] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Refreshing network info cache for port 03adda47-e195-413d-85d7-5fd0c5a5027b {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 601.518970] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3272665, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.603634} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.519739] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 50b84adc-5ff3-4a1e-a09f-5c96daef9b87/50b84adc-5ff3-4a1e-a09f-5c96daef9b87.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 601.519739] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 601.520183] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b2ccc931-d68c-49be-a405-c928691cff0a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.530564] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for the task: (returnval){ [ 601.530564] env[69328]: value = "task-3272672" [ 601.530564] env[69328]: _type = "Task" [ 601.530564] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.542804] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3272672, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.558772] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f32850ec-8370-4003-bf3a-36281054c5c1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.566457] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bde07d10-3e7e-4119-8e67-c8356421cd1c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.605250] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1860363-d3d9-4048-97fd-d4efedc580f2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.612452] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86046c63-f505-4b44-882f-e389a29520b0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.629751] env[69328]: DEBUG nova.compute.provider_tree [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 601.651950] env[69328]: DEBUG nova.network.neutron [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Successfully updated port: cbf38f9d-1507-45bb-9684-bf804c86b93b {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 601.761205] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2f512e4a-1923-49d4-83bd-61d44dba8ac4 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Lock "88f9f0c2-0c55-45bf-a494-8f1ee4922443" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.284s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 601.832307] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272670, 'name': CreateVM_Task, 'duration_secs': 0.389701} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.838754] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 601.838754] env[69328]: DEBUG oslo_vmware.service [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-509e64fb-bf01-4b1c-afae-074ae5a72c16 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.846538] env[69328]: DEBUG oslo_vmware.api [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Task: {'id': task-3272666, 'name': Rename_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.848523] env[69328]: DEBUG oslo_concurrency.lockutils [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 601.848523] env[69328]: DEBUG oslo_concurrency.lockutils [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 601.848861] env[69328]: DEBUG oslo_concurrency.lockutils [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 601.849556] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-acdd2a8a-b14a-4da3-bdb2-7c2c4b3071de {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.854674] env[69328]: DEBUG oslo_vmware.api [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Waiting for the task: (returnval){ [ 601.854674] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52687f53-fff2-6779-5497-8cd442d25499" [ 601.854674] env[69328]: _type = "Task" [ 601.854674] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.866296] env[69328]: DEBUG oslo_vmware.api [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52687f53-fff2-6779-5497-8cd442d25499, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.878034] env[69328]: INFO nova.compute.manager [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Took 19.84 seconds to build instance. [ 601.929532] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3272671, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.951024] env[69328]: DEBUG nova.network.neutron [req-f653844c-3e78-4a7e-86c9-979760fa7c8a req-095674d4-a7b7-4028-9fc0-793474d21918 service nova] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Updated VIF entry in instance network info cache for port d79088e2-4f2b-49c3-bb85-8d8c7c108a8e. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 601.951024] env[69328]: DEBUG nova.network.neutron [req-f653844c-3e78-4a7e-86c9-979760fa7c8a req-095674d4-a7b7-4028-9fc0-793474d21918 service nova] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Updating instance_info_cache with network_info: [{"id": "d79088e2-4f2b-49c3-bb85-8d8c7c108a8e", "address": "fa:16:3e:25:84:5e", "network": {"id": "a4231ba3-2b54-4dd9-82bc-772b0823748c", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-322499105-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "435a67cec87842678e6c1c354ab09bd7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "195e328b-e41a-49f5-9e51-546b8ea8ceba", "external-id": "nsx-vlan-transportzone-735", "segmentation_id": 735, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd79088e2-4f", "ovs_interfaceid": "d79088e2-4f2b-49c3-bb85-8d8c7c108a8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 602.043633] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3272672, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070453} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.043979] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 602.045266] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e73433a-d00e-4425-870b-9eb6354fac45 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.085820] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Reconfiguring VM instance instance-00000005 to attach disk [datastore2] 50b84adc-5ff3-4a1e-a09f-5c96daef9b87/50b84adc-5ff3-4a1e-a09f-5c96daef9b87.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 602.085820] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0d2d45bd-c9b8-4c3c-9752-ad71475168b3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.116645] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for the task: (returnval){ [ 602.116645] env[69328]: value = "task-3272673" [ 602.116645] env[69328]: _type = "Task" [ 602.116645] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.122785] env[69328]: DEBUG nova.network.neutron [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Successfully created port: b801ae0c-2061-4103-8530-3d58f8785333 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 602.129478] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3272673, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.134166] env[69328]: DEBUG nova.scheduler.client.report [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 602.154943] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquiring lock "refresh_cache-d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.155193] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquired lock "refresh_cache-d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 602.155335] env[69328]: DEBUG nova.network.neutron [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 602.239096] env[69328]: DEBUG nova.compute.manager [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 602.265878] env[69328]: DEBUG nova.virt.hardware [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 602.266132] env[69328]: DEBUG nova.virt.hardware [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 602.266289] env[69328]: DEBUG nova.virt.hardware [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 602.266526] env[69328]: DEBUG nova.virt.hardware [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 602.266690] env[69328]: DEBUG nova.virt.hardware [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 602.266840] env[69328]: DEBUG nova.virt.hardware [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 602.267058] env[69328]: DEBUG nova.virt.hardware [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 602.267250] env[69328]: DEBUG nova.virt.hardware [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 602.267427] 
env[69328]: DEBUG nova.virt.hardware [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 602.267536] env[69328]: DEBUG nova.virt.hardware [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 602.267726] env[69328]: DEBUG nova.virt.hardware [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 602.268406] env[69328]: DEBUG nova.compute.manager [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 602.271541] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0201b79-8a4b-4b1a-82aa-e0cac791f3ea {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.279985] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a246d1f9-eeb7-4e3d-90d7-535cb577850b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.349783] env[69328]: DEBUG oslo_vmware.api [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Task: {'id': task-3272666, 'name': Rename_Task, 'duration_secs': 1.407702} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.350107] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 602.350107] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6bcf58b6-2152-4c27-800c-f601bce3cf0a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.359658] env[69328]: DEBUG oslo_vmware.api [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Waiting for the task: (returnval){ [ 602.359658] env[69328]: value = "task-3272674" [ 602.359658] env[69328]: _type = "Task" [ 602.359658] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.367988] env[69328]: DEBUG oslo_concurrency.lockutils [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 602.367988] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 602.367988] env[69328]: DEBUG oslo_concurrency.lockutils [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.367988] env[69328]: DEBUG oslo_concurrency.lockutils [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 602.368384] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 602.372707] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-54652d6a-29de-4238-bca4-2fbd106a0755 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.374182] env[69328]: DEBUG oslo_vmware.api [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Task: {'id': task-3272674, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.380206] env[69328]: DEBUG oslo_concurrency.lockutils [None req-04760df6-4c22-4338-91c2-c0de3a862fc2 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Lock "676173ee-8001-48c6-bd28-09130f6dd99a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.367s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 602.385340] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 602.385532] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 602.386748] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30e0d8ff-b029-4af2-9363-a1abc2f4581b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.396062] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40e2b1db-0e27-4a97-bd03-9566d8c59b29 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.402082] env[69328]: DEBUG oslo_vmware.api [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Waiting for the task: (returnval){ [ 602.402082] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ffca80-e0f9-2f03-f34a-268576a82b64" [ 602.402082] env[69328]: _type = "Task" [ 602.402082] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.411099] env[69328]: DEBUG oslo_vmware.api [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ffca80-e0f9-2f03-f34a-268576a82b64, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.426310] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3272671, 'name': ReconfigVM_Task, 'duration_secs': 0.626281} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.426581] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Reconfigured VM instance instance-00000004 to attach disk [datastore2] 230c6278-65af-4f5d-b817-0b695086c29d/230c6278-65af-4f5d-b817-0b695086c29d.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 602.427098] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a4c34445-4020-4e61-8aaf-a8e7b740ac05 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.432958] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for the task: (returnval){ [ 602.432958] env[69328]: value = "task-3272675" [ 602.432958] env[69328]: _type = "Task" [ 602.432958] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.441800] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3272675, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.450895] env[69328]: DEBUG oslo_concurrency.lockutils [req-f653844c-3e78-4a7e-86c9-979760fa7c8a req-095674d4-a7b7-4028-9fc0-793474d21918 service nova] Releasing lock "refresh_cache-50b84adc-5ff3-4a1e-a09f-5c96daef9b87" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 602.457270] env[69328]: DEBUG nova.network.neutron [req-c96ea918-ec4c-4d15-989f-1ffca9cd0ff1 req-5dc64386-2d24-4851-9b84-437a7dbf7cc5 service nova] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Updated VIF entry in instance network info cache for port 03adda47-e195-413d-85d7-5fd0c5a5027b. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 602.457670] env[69328]: DEBUG nova.network.neutron [req-c96ea918-ec4c-4d15-989f-1ffca9cd0ff1 req-5dc64386-2d24-4851-9b84-437a7dbf7cc5 service nova] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Updating instance_info_cache with network_info: [{"id": "03adda47-e195-413d-85d7-5fd0c5a5027b", "address": "fa:16:3e:c6:a1:68", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.109", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap03adda47-e1", "ovs_interfaceid": "03adda47-e195-413d-85d7-5fd0c5a5027b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 602.630241] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3272673, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.639942] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.433s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 602.641921] env[69328]: DEBUG nova.compute.manager [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 602.645990] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.185s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 602.647632] env[69328]: INFO nova.compute.claims [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 602.709041] env[69328]: DEBUG nova.network.neutron [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 602.724885] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Acquiring lock "bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 602.725228] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Lock "bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 602.809037] env[69328]: DEBUG oslo_concurrency.lockutils [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 602.876326] env[69328]: DEBUG oslo_vmware.api [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Task: {'id': task-3272674, 'name': PowerOnVM_Task} progress is 37%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.884829] env[69328]: DEBUG nova.compute.manager [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Starting instance... 
{{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 602.918914] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Preparing fetch location {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 602.918914] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Creating directory with path [datastore1] vmware_temp/d27dc7a0-0a9f-4df7-a32c-6a760fb08e4c/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 602.919125] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dc56a480-6cf6-4ba9-9fe9-7deec7c36560 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.955766] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Created directory with path [datastore1] vmware_temp/d27dc7a0-0a9f-4df7-a32c-6a760fb08e4c/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 602.956359] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Fetch image to [datastore1] vmware_temp/d27dc7a0-0a9f-4df7-a32c-6a760fb08e4c/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/tmp-sparse.vmdk {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 602.956359] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Downloading image file data a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 to [datastore1] vmware_temp/d27dc7a0-0a9f-4df7-a32c-6a760fb08e4c/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/tmp-sparse.vmdk on the data store datastore1 {{(pid=69328) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 602.958112] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99460bd4-b1dc-4465-b803-99dc91516aab {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.966768] env[69328]: DEBUG oslo_concurrency.lockutils [req-c96ea918-ec4c-4d15-989f-1ffca9cd0ff1 req-5dc64386-2d24-4851-9b84-437a7dbf7cc5 service nova] Releasing lock "refresh_cache-caba3b5c-db15-4de6-8d3d-41f6751f1b83" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 602.967225] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3272675, 'name': Rename_Task, 'duration_secs': 0.149159} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.967917] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 602.969146] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-180197e3-6d4b-4b45-90a4-ac9bb01cb7af {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.975261] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2535301-200e-4b7e-bf4d-29a411506e34 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.981291] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for the task: (returnval){ [ 602.981291] env[69328]: value = "task-3272676" [ 602.981291] env[69328]: _type = "Task" [ 602.981291] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.993466] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8914b1e-8837-41ac-b36f-42e6eb3dbc49 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.005330] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3272676, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.038161] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e73d25a-4b83-42f5-92b2-dbfa27140dcf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.044997] env[69328]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a5ac472d-2537-4cf0-a726-b8f939c52cfa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.069085] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Downloading image file data a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 to the data store datastore1 {{(pid=69328) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 603.113590] env[69328]: DEBUG nova.network.neutron [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Updating instance_info_cache with network_info: [{"id": "cbf38f9d-1507-45bb-9684-bf804c86b93b", "address": "fa:16:3e:a1:39:be", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.236", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbf38f9d-15", "ovs_interfaceid": "cbf38f9d-1507-45bb-9684-bf804c86b93b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 603.130336] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3272673, 'name': ReconfigVM_Task, 'duration_secs': 0.617035} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.134335] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Reconfigured VM instance instance-00000005 to attach disk [datastore2] 50b84adc-5ff3-4a1e-a09f-5c96daef9b87/50b84adc-5ff3-4a1e-a09f-5c96daef9b87.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 603.136137] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d6db9302-b28b-4ab4-9c62-020c2bda06aa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.144462] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for the task: (returnval){ [ 603.144462] env[69328]: value = "task-3272677" [ 603.144462] env[69328]: _type = "Task" [ 603.144462] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.156746] env[69328]: DEBUG nova.compute.utils [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 603.164907] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3272677, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.165608] env[69328]: DEBUG oslo_vmware.rw_handles [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d27dc7a0-0a9f-4df7-a32c-6a760fb08e4c/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69328) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 603.167483] env[69328]: DEBUG nova.compute.manager [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 603.167674] env[69328]: DEBUG nova.network.neutron [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 603.278853] env[69328]: DEBUG nova.policy [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '43be625728f24af5a2f6a650279d689d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cdc479a290524130b9d17e627a64b65a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 603.375914] env[69328]: DEBUG oslo_vmware.api [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Task: {'id': task-3272674, 'name': PowerOnVM_Task} progress is 91%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.417766] env[69328]: DEBUG oslo_concurrency.lockutils [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 603.455481] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Acquiring lock "f428f9a9-d792-4c1c-b2d4-ea066cc09d67" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 603.455690] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Lock "f428f9a9-d792-4c1c-b2d4-ea066cc09d67" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 603.493017] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3272676, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.617386] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Releasing lock "refresh_cache-d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 603.617386] env[69328]: DEBUG nova.compute.manager [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Instance network_info: |[{"id": "cbf38f9d-1507-45bb-9684-bf804c86b93b", "address": "fa:16:3e:a1:39:be", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.236", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbf38f9d-15", "ovs_interfaceid": "cbf38f9d-1507-45bb-9684-bf804c86b93b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 603.617734] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a1:39:be', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a8b99a46-3e7f-4ef1-9e45-58e6cd17f210', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cbf38f9d-1507-45bb-9684-bf804c86b93b', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 603.626816] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Creating folder: Project (f2aed2695f2d437fbe9202124d2ed95b). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 603.630421] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2db3cec9-c7d3-407d-99f8-8e926a5d4e1f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.642529] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Created folder: Project (f2aed2695f2d437fbe9202124d2ed95b) in parent group-v653649. 
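The "Acquiring lock ... acquired ... waited" and ""released" ... held" DEBUG lines above (for names such as "compute_resources" and "refresh_cache-<uuid>") are emitted by oslo.concurrency's lockutils wrapper. A minimal sketch of that pattern, assuming only that oslo.concurrency is installed; the function and lock names below are illustrative, not Nova's actual code:

```python
# Sketch of the lock pattern behind the "Acquiring lock / acquired / released"
# DEBUG entries. The function below is a placeholder, not Nova's claim code.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def instance_claim(instance_uuid):
    # Only one thread in this worker holds "compute_resources" at a time;
    # lockutils logs the "acquired ... waited Ns" / "released ... held Ns" lines.
    return instance_uuid

# Ad-hoc critical sections use the equivalent context-manager form:
with lockutils.lock('refresh_cache-' + 'some-instance-uuid'):
    pass  # e.g. rebuild the instance's network info cache while holding the lock
```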
[ 603.642741] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Creating folder: Instances. Parent ref: group-v653666. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 603.643118] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-db4dcca4-d4f9-4885-8170-78b198b5d522 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.656581] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3272677, 'name': Rename_Task, 'duration_secs': 0.17274} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.657030] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 603.657296] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cef0b71d-841b-4eee-9143-3a0c6ca62c43 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.663362] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for the task: (returnval){ [ 603.663362] env[69328]: value = "task-3272680" [ 603.663362] env[69328]: _type = "Task" [ 603.663362] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.668170] env[69328]: DEBUG nova.compute.manager [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 603.671061] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Created folder: Instances in parent group-v653666. [ 603.675074] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 603.677192] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 603.683626] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e13758ea-fc32-4d4e-afdd-b33e2b247407 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.699938] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3272680, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.700928] env[69328]: DEBUG nova.network.neutron [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Successfully created port: 742f3021-311f-4b36-9507-03a493f2b49f {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 603.713451] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 603.713451] env[69328]: value = "task-3272681" [ 603.713451] env[69328]: _type = "Task" [ 603.713451] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.723336] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272681, 'name': CreateVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.883909] env[69328]: DEBUG oslo_vmware.api [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Task: {'id': task-3272674, 'name': PowerOnVM_Task, 'duration_secs': 1.021397} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.891474] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 603.891698] env[69328]: INFO nova.compute.manager [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Took 13.46 seconds to spawn the instance on the hypervisor. [ 603.891889] env[69328]: DEBUG nova.compute.manager [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 603.893211] env[69328]: DEBUG oslo_vmware.rw_handles [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Completed reading data from the image iterator. 
{{(pid=69328) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 603.893384] env[69328]: DEBUG oslo_vmware.rw_handles [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d27dc7a0-0a9f-4df7-a32c-6a760fb08e4c/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69328) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 603.894278] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-267d271e-cd14-4ad4-b4f0-d1a5152dfb86 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.995634] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3272676, 'name': PowerOnVM_Task, 'duration_secs': 0.51569} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.998621] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 603.998891] env[69328]: INFO nova.compute.manager [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Took 11.22 seconds to spawn the instance on the hypervisor. 
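The earlier "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" entry comes from an oslo.service looping-call helper that re-invokes a poll function until it signals completion. A minimal sketch of one such helper, FixedIntervalLoopingCall, under the assumption that the polled function and interval here are placeholders rather than Nova's actual values:

```python
# Sketch of a fixed-interval wait loop like the one logged above.
from oslo_service import loopingcall

state = {'done': False}

def _poll():
    # In Nova this would check whether the vCenter task has finished; here we
    # simply stop on the second call by raising LoopingCallDone with a result.
    if state['done']:
        raise loopingcall.LoopingCallDone(retvalue='created')
    state['done'] = True

timer = loopingcall.FixedIntervalLoopingCall(_poll)
result = timer.start(interval=0.5).wait()  # blocks until LoopingCallDone
print(result)  # 'created'
```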
[ 603.999507] env[69328]: DEBUG nova.compute.manager [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 604.000755] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-473bfcc3-21a4-423a-9a2f-f6ed9e0642e3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.015037] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14af3a95-fe06-4113-b73a-201f167e168c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.023176] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3e9a706-06c9-4c82-afb7-27912cf6483a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.062903] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29bbc28d-84c8-4d53-ad17-dc1718c29af4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.071323] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcd25d88-e8ae-4877-baf8-94d574103fd0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.087097] env[69328]: DEBUG nova.compute.provider_tree [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 604.180510] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3272680, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.210702] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Downloaded image file data a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 to vmware_temp/d27dc7a0-0a9f-4df7-a32c-6a760fb08e4c/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/tmp-sparse.vmdk on the data store datastore1 {{(pid=69328) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 604.212772] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Caching image {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 604.212969] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Copying Virtual Disk [datastore1] vmware_temp/d27dc7a0-0a9f-4df7-a32c-6a760fb08e4c/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/tmp-sparse.vmdk to [datastore1] vmware_temp/d27dc7a0-0a9f-4df7-a32c-6a760fb08e4c/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 604.214172] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2eacf39f-3457-4d55-8814-921c54d90dc5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.233543] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272681, 'name': CreateVM_Task, 'duration_secs': 0.402644} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.234725] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 604.235314] env[69328]: DEBUG oslo_vmware.api [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Waiting for the task: (returnval){ [ 604.235314] env[69328]: value = "task-3272682" [ 604.235314] env[69328]: _type = "Task" [ 604.235314] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.235811] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.235953] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 604.236300] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 604.240409] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e54620e-a15b-497a-8acf-e923e98a5159 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.246910] env[69328]: DEBUG oslo_vmware.api [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Waiting for the task: (returnval){ [ 604.246910] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5219e26f-94b2-d47c-6a7c-96140dcb6ce7" [ 604.246910] env[69328]: _type = "Task" [ 604.246910] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.251418] env[69328]: DEBUG oslo_vmware.api [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': task-3272682, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.263609] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 604.264200] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 604.264200] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.418913] env[69328]: INFO nova.compute.manager [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Took 22.09 seconds to build instance. [ 604.444881] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Acquiring lock "7b348a95-3ab2-4112-87e3-b17504c0a302" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 604.445547] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Lock "7b348a95-3ab2-4112-87e3-b17504c0a302" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 604.523171] env[69328]: INFO nova.compute.manager [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Took 21.71 seconds to build instance. 
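The PowerOnVM_Task, Rename_Task and CopyVirtualDisk_Task entries all follow the same shape: invoke a vSphere task method, then poll it ("Waiting for the task ... to complete", "progress is N%") until it succeeds. A minimal sketch of that invoke-then-poll pattern with oslo.vmware, assuming the host, credentials and managed object id are placeholders and that error handling is omitted; this is not the driver's actual code path:

```python
# Sketch of the invoke-then-poll pattern behind the *_Task entries above.
# Note: constructing the session immediately logs in to the (placeholder) vCenter.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vc.example.test', 'user', 'secret',
    api_retry_count=3, task_poll_interval=0.5)

vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')  # hypothetical moref id
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

# wait_for_task polls the task object (the "progress is N%" lines) and returns
# once it reaches the "success" state, raising if the task errors out.
session.wait_for_task(task)
```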
[ 604.536641] env[69328]: DEBUG nova.compute.manager [req-0e23d4b1-09cc-42fb-8c98-7e51778597ff req-e2ff6b8a-5a63-4493-8496-e48599f98193 service nova] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Received event network-vif-plugged-cbf38f9d-1507-45bb-9684-bf804c86b93b {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 604.536641] env[69328]: DEBUG oslo_concurrency.lockutils [req-0e23d4b1-09cc-42fb-8c98-7e51778597ff req-e2ff6b8a-5a63-4493-8496-e48599f98193 service nova] Acquiring lock "d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 604.536997] env[69328]: DEBUG oslo_concurrency.lockutils [req-0e23d4b1-09cc-42fb-8c98-7e51778597ff req-e2ff6b8a-5a63-4493-8496-e48599f98193 service nova] Lock "d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 604.536997] env[69328]: DEBUG oslo_concurrency.lockutils [req-0e23d4b1-09cc-42fb-8c98-7e51778597ff req-e2ff6b8a-5a63-4493-8496-e48599f98193 service nova] Lock "d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 604.537799] env[69328]: DEBUG nova.compute.manager [req-0e23d4b1-09cc-42fb-8c98-7e51778597ff req-e2ff6b8a-5a63-4493-8496-e48599f98193 service nova] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] No waiting events found dispatching network-vif-plugged-cbf38f9d-1507-45bb-9684-bf804c86b93b {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 604.537799] env[69328]: WARNING nova.compute.manager [req-0e23d4b1-09cc-42fb-8c98-7e51778597ff req-e2ff6b8a-5a63-4493-8496-e48599f98193 service nova] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Received unexpected event network-vif-plugged-cbf38f9d-1507-45bb-9684-bf804c86b93b for instance with vm_state building and task_state spawning. [ 604.537799] env[69328]: DEBUG nova.compute.manager [req-0e23d4b1-09cc-42fb-8c98-7e51778597ff req-e2ff6b8a-5a63-4493-8496-e48599f98193 service nova] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Received event network-changed-cbf38f9d-1507-45bb-9684-bf804c86b93b {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 604.537799] env[69328]: DEBUG nova.compute.manager [req-0e23d4b1-09cc-42fb-8c98-7e51778597ff req-e2ff6b8a-5a63-4493-8496-e48599f98193 service nova] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Refreshing instance network info cache due to event network-changed-cbf38f9d-1507-45bb-9684-bf804c86b93b. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 604.538132] env[69328]: DEBUG oslo_concurrency.lockutils [req-0e23d4b1-09cc-42fb-8c98-7e51778597ff req-e2ff6b8a-5a63-4493-8496-e48599f98193 service nova] Acquiring lock "refresh_cache-d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.538132] env[69328]: DEBUG oslo_concurrency.lockutils [req-0e23d4b1-09cc-42fb-8c98-7e51778597ff req-e2ff6b8a-5a63-4493-8496-e48599f98193 service nova] Acquired lock "refresh_cache-d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 604.538962] env[69328]: DEBUG nova.network.neutron [req-0e23d4b1-09cc-42fb-8c98-7e51778597ff req-e2ff6b8a-5a63-4493-8496-e48599f98193 service nova] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Refreshing network info cache for port cbf38f9d-1507-45bb-9684-bf804c86b93b {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 604.591735] env[69328]: DEBUG nova.scheduler.client.report [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 604.675822] env[69328]: DEBUG oslo_vmware.api [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3272680, 'name': PowerOnVM_Task, 'duration_secs': 0.681503} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.675960] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 604.676458] env[69328]: INFO nova.compute.manager [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Took 9.58 seconds to spawn the instance on the hypervisor. 
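The instance_info_cache updates above log the full network_info structure: a list of VIFs, each with an id, MAC address and a nested network whose subnets carry the fixed IPs. A small sketch that walks that structure and collects the fixed IPv4 addresses per VIF; the literal below is a trimmed copy of the logged data, not fetched from anywhere:

```python
# Collect fixed IPv4 addresses per VIF from a network_info-shaped structure.
network_info = [{
    "id": "cbf38f9d-1507-45bb-9684-bf804c86b93b",
    "address": "fa:16:3e:a1:39:be",
    "network": {
        "id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86",
        "label": "shared",
        "subnets": [{
            "cidr": "192.168.233.0/24",
            "ips": [{"address": "192.168.233.236", "type": "fixed", "version": 4}],
        }],
    },
}]

fixed_ips = {
    vif["id"]: [
        ip["address"]
        for subnet in vif["network"]["subnets"]
        for ip in subnet["ips"]
        if ip["type"] == "fixed" and ip["version"] == 4
    ]
    for vif in network_info
}
print(fixed_ips)  # {'cbf38f9d-1507-45bb-9684-bf804c86b93b': ['192.168.233.236']}
```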
[ 604.676982] env[69328]: DEBUG nova.compute.manager [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 604.678031] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e5282cc-ac50-4604-b4f7-799fc6bc17a6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.683139] env[69328]: DEBUG nova.compute.manager [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 604.710242] env[69328]: DEBUG nova.network.neutron [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Successfully updated port: b801ae0c-2061-4103-8530-3d58f8785333 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 604.725905] env[69328]: DEBUG nova.virt.hardware [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 604.725905] env[69328]: DEBUG nova.virt.hardware [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 604.725905] env[69328]: DEBUG nova.virt.hardware [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 604.726073] env[69328]: DEBUG nova.virt.hardware [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 604.726521] env[69328]: DEBUG nova.virt.hardware [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 
tempest-DeleteServersTestJSON-1704685797-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 604.726521] env[69328]: DEBUG nova.virt.hardware [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 604.726521] env[69328]: DEBUG nova.virt.hardware [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 604.726743] env[69328]: DEBUG nova.virt.hardware [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 604.726785] env[69328]: DEBUG nova.virt.hardware [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 604.726920] env[69328]: DEBUG nova.virt.hardware [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 604.727108] env[69328]: DEBUG nova.virt.hardware [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 604.728059] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffed0c1b-7b44-4de8-b704-e1cb6a3a76e1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.743489] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e472461-b03e-42bb-8660-0dd4f1ba5f1b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.767906] env[69328]: DEBUG oslo_vmware.api [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': task-3272682, 'name': CopyVirtualDisk_Task} progress is 35%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.923251] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fd3e602b-721a-4336-bf2a-5bbf593f034f tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Lock "49a668a7-5967-46a9-823f-7f613d34d152" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.609s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 605.028241] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lock "230c6278-65af-4f5d-b817-0b695086c29d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.227s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 605.098824] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.452s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 605.098824] env[69328]: DEBUG nova.compute.manager [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 605.101540] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 13.948s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 605.102762] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 605.102762] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69328) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 605.102762] env[69328]: DEBUG oslo_concurrency.lockutils [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.294s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 605.103911] env[69328]: INFO nova.compute.claims [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 605.107258] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fd98a80-e89a-4af4-a933-b65096b82707 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.121920] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fe34c6b-3a49-4226-b07b-aae2e2e4cc88 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.147217] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4476689-01ef-4419-91c2-90496a873e75 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.157226] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdea0722-e026-4259-9bae-3f2c870da095 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.191879] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181009MB free_disk=116GB free_vcpus=48 pci_devices=None {{(pid=69328) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 605.192073] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 605.211018] env[69328]: INFO nova.compute.manager [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Took 22.36 seconds to build instance. [ 605.215734] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Acquiring lock "refresh_cache-d97dc6d5-e55f-4b9e-91e6-cfdea82f5236" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.215919] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Acquired lock "refresh_cache-d97dc6d5-e55f-4b9e-91e6-cfdea82f5236" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 605.216302] env[69328]: DEBUG nova.network.neutron [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 605.253420] env[69328]: DEBUG oslo_vmware.api [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': task-3272682, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.996688} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.254446] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Copied Virtual Disk [datastore1] vmware_temp/d27dc7a0-0a9f-4df7-a32c-6a760fb08e4c/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/tmp-sparse.vmdk to [datastore1] vmware_temp/d27dc7a0-0a9f-4df7-a32c-6a760fb08e4c/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 605.254446] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Deleting the datastore file [datastore1] vmware_temp/d27dc7a0-0a9f-4df7-a32c-6a760fb08e4c/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/tmp-sparse.vmdk {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 605.254446] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-08468aac-2f29-481e-accd-bd47b5d82111 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.268061] env[69328]: DEBUG oslo_vmware.api [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Waiting for the task: (returnval){ [ 605.268061] env[69328]: value = "task-3272683" [ 605.268061] env[69328]: _type = "Task" [ 605.268061] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.279532] env[69328]: DEBUG oslo_vmware.api [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': task-3272683, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.427354] env[69328]: DEBUG nova.compute.manager [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 605.529592] env[69328]: DEBUG nova.compute.manager [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Starting instance... 
{{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 605.608746] env[69328]: DEBUG nova.compute.utils [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 605.610127] env[69328]: DEBUG nova.compute.manager [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 605.610965] env[69328]: DEBUG nova.network.neutron [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 605.636725] env[69328]: DEBUG nova.network.neutron [req-0e23d4b1-09cc-42fb-8c98-7e51778597ff req-e2ff6b8a-5a63-4493-8496-e48599f98193 service nova] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Updated VIF entry in instance network info cache for port cbf38f9d-1507-45bb-9684-bf804c86b93b. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 605.638028] env[69328]: DEBUG nova.network.neutron [req-0e23d4b1-09cc-42fb-8c98-7e51778597ff req-e2ff6b8a-5a63-4493-8496-e48599f98193 service nova] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Updating instance_info_cache with network_info: [{"id": "cbf38f9d-1507-45bb-9684-bf804c86b93b", "address": "fa:16:3e:a1:39:be", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.236", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbf38f9d-15", "ovs_interfaceid": "cbf38f9d-1507-45bb-9684-bf804c86b93b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 605.706108] env[69328]: DEBUG nova.policy [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fa91f3dc174d4c33afc82e56dd2bf758', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8502178b3d334c338b63dfde3eae8f08', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': 
None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 605.719242] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bb8f957-3916-4bbb-af23-2b2f57c6df90 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lock "50b84adc-5ff3-4a1e-a09f-5c96daef9b87" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.884s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 605.787365] env[69328]: DEBUG oslo_vmware.api [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': task-3272683, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.038689} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.787365] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 605.787365] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Moving file from [datastore1] vmware_temp/d27dc7a0-0a9f-4df7-a32c-6a760fb08e4c/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 to [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318. {{(pid=69328) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 605.787365] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-971a9ec1-368a-4540-ad8f-ec83fc1fe29f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.799532] env[69328]: DEBUG oslo_vmware.api [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Waiting for the task: (returnval){ [ 605.799532] env[69328]: value = "task-3272684" [ 605.799532] env[69328]: _type = "Task" [ 605.799532] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.814707] env[69328]: DEBUG nova.network.neutron [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 605.824711] env[69328]: DEBUG oslo_vmware.api [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': task-3272684, 'name': MoveDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.949119] env[69328]: DEBUG nova.network.neutron [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Successfully updated port: 742f3021-311f-4b36-9507-03a493f2b49f {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 605.975251] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 606.053625] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 606.114524] env[69328]: DEBUG nova.compute.manager [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 606.143212] env[69328]: DEBUG oslo_concurrency.lockutils [req-0e23d4b1-09cc-42fb-8c98-7e51778597ff req-e2ff6b8a-5a63-4493-8496-e48599f98193 service nova] Releasing lock "refresh_cache-d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 606.229383] env[69328]: DEBUG nova.compute.manager [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Starting instance... 
{{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 606.274362] env[69328]: DEBUG nova.network.neutron [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Updating instance_info_cache with network_info: [{"id": "b801ae0c-2061-4103-8530-3d58f8785333", "address": "fa:16:3e:09:6a:91", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.186", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb801ae0c-20", "ovs_interfaceid": "b801ae0c-2061-4103-8530-3d58f8785333", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.314399] env[69328]: DEBUG oslo_vmware.api [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': task-3272684, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.054043} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.314923] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] File moved {{(pid=69328) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 606.315130] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Cleaning up location [datastore1] vmware_temp/d27dc7a0-0a9f-4df7-a32c-6a760fb08e4c {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 606.315286] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Deleting the datastore file [datastore1] vmware_temp/d27dc7a0-0a9f-4df7-a32c-6a760fb08e4c {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 606.315544] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e0a6ee9e-0096-4145-8b63-337d7e0b98f0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.325233] env[69328]: DEBUG oslo_vmware.api [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Waiting for the task: (returnval){ [ 606.325233] env[69328]: value = "task-3272685" [ 606.325233] env[69328]: _type = "Task" [ 606.325233] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.345587] env[69328]: DEBUG oslo_vmware.api [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': task-3272685, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.456967] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "refresh_cache-ed10d511-dbed-4884-8ac6-f737173f62c5" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.456967] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquired lock "refresh_cache-ed10d511-dbed-4884-8ac6-f737173f62c5" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 606.457793] env[69328]: DEBUG nova.network.neutron [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 606.568271] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Acquiring lock "6102f8e6-f815-4f5f-921f-990be81fca0d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 606.568511] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Lock "6102f8e6-f815-4f5f-921f-990be81fca0d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 606.575361] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78b55141-1bdb-452e-9208-8968a22a7448 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.584417] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdde9566-ef6c-4c24-a6b2-cb6080db0c77 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.591845] env[69328]: DEBUG nova.network.neutron [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Successfully created port: 09c4fb65-f87f-4fdc-9a85-cf73224a3ca3 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 606.619807] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34b4ecdb-df2f-43f5-a313-b6405668f14f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.631950] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e709df0b-d676-46cf-b1d8-67b4e49a6c19 
{{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.663939] env[69328]: DEBUG nova.compute.provider_tree [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 606.733065] env[69328]: DEBUG nova.compute.manager [None req-9cf29b78-f570-49b3-9061-6d78e2f470c8 tempest-ServerDiagnosticsTest-366039912 tempest-ServerDiagnosticsTest-366039912-project-admin] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 606.734652] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa4b6875-d904-4f10-878e-8110824026a7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.745197] env[69328]: INFO nova.compute.manager [None req-9cf29b78-f570-49b3-9061-6d78e2f470c8 tempest-ServerDiagnosticsTest-366039912 tempest-ServerDiagnosticsTest-366039912-project-admin] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Retrieving diagnostics [ 606.745849] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ec8b235-e99c-4c18-b55e-0a6f44bcc59f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.782073] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 606.782368] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Releasing lock "refresh_cache-d97dc6d5-e55f-4b9e-91e6-cfdea82f5236" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 606.782689] env[69328]: DEBUG nova.compute.manager [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Instance network_info: |[{"id": "b801ae0c-2061-4103-8530-3d58f8785333", "address": "fa:16:3e:09:6a:91", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.186", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": 
"nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb801ae0c-20", "ovs_interfaceid": "b801ae0c-2061-4103-8530-3d58f8785333", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 606.783344] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:09:6a:91', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a8b99a46-3e7f-4ef1-9e45-58e6cd17f210', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b801ae0c-2061-4103-8530-3d58f8785333', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 606.792070] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Creating folder: Project (066e4edc64b44104b45e81bdb34e4d5d). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 606.792356] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4425418c-728d-49c9-8b4e-e5eb12abc6b6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.808345] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Created folder: Project (066e4edc64b44104b45e81bdb34e4d5d) in parent group-v653649. [ 606.808625] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Creating folder: Instances. Parent ref: group-v653669. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 606.809236] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9d78bd33-ab05-48f8-8973-79a2b8daa120 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.821701] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Created folder: Instances in parent group-v653669. [ 606.822063] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 606.822318] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 606.822973] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-447777fb-a9b6-4c27-b3ba-bf7ed909b6bf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.850818] env[69328]: DEBUG oslo_vmware.api [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': task-3272685, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.033584} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.855834] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 606.856713] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 606.856713] env[69328]: value = "task-3272688" [ 606.856713] env[69328]: _type = "Task" [ 606.856713] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.856899] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7108f42-22e5-444e-a123-66d5f0a7726a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.866649] env[69328]: DEBUG oslo_vmware.api [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Waiting for the task: (returnval){ [ 606.866649] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a7e294-955f-931a-2c0a-33a2ea837b63" [ 606.866649] env[69328]: _type = "Task" [ 606.866649] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.879199] env[69328]: DEBUG oslo_vmware.api [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a7e294-955f-931a-2c0a-33a2ea837b63, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.072583] env[69328]: DEBUG nova.network.neutron [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 607.128215] env[69328]: DEBUG nova.compute.manager [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 607.167009] env[69328]: DEBUG nova.virt.hardware [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 607.167613] env[69328]: DEBUG nova.virt.hardware [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 607.167960] env[69328]: DEBUG nova.virt.hardware [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 607.168671] env[69328]: DEBUG nova.virt.hardware [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 607.168976] env[69328]: DEBUG nova.virt.hardware [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 607.169369] env[69328]: DEBUG nova.virt.hardware [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 607.170069] env[69328]: DEBUG nova.virt.hardware [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 607.170393] env[69328]: DEBUG nova.virt.hardware [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 607.170705] env[69328]: DEBUG nova.virt.hardware [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 607.171020] env[69328]: DEBUG nova.virt.hardware [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 607.171428] env[69328]: DEBUG nova.virt.hardware [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 607.172789] env[69328]: DEBUG nova.scheduler.client.report [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 607.176577] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa2f2867-2db2-4ed6-89c1-2e3b8bd144e7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.190370] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a94e3f5-b747-4507-8c81-146809fda8e0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.370555] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272688, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.380313] env[69328]: DEBUG oslo_vmware.api [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a7e294-955f-931a-2c0a-33a2ea837b63, 'name': SearchDatastore_Task, 'duration_secs': 0.01308} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.380579] env[69328]: DEBUG oslo_concurrency.lockutils [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 607.380852] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] caba3b5c-db15-4de6-8d3d-41f6751f1b83/caba3b5c-db15-4de6-8d3d-41f6751f1b83.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 607.381193] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 607.381374] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 607.381581] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e30a0934-952d-4dbd-b7f3-afd034468e9b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.383738] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-595dded5-5e72-469b-bdc3-163ddd919060 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.393544] env[69328]: DEBUG oslo_vmware.api [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Waiting for the task: (returnval){ [ 607.393544] env[69328]: value = "task-3272689" [ 607.393544] env[69328]: _type = "Task" [ 607.393544] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.397588] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 607.397838] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 607.399011] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b55017e8-846f-4e17-873b-29aa3a8d9c98 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.406526] env[69328]: DEBUG oslo_vmware.api [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': task-3272689, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.411454] env[69328]: DEBUG oslo_vmware.api [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Waiting for the task: (returnval){ [ 607.411454] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5278bb06-244b-14f0-ba12-35f024939039" [ 607.411454] env[69328]: _type = "Task" [ 607.411454] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.426233] env[69328]: DEBUG oslo_vmware.api [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5278bb06-244b-14f0-ba12-35f024939039, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.685922] env[69328]: DEBUG oslo_concurrency.lockutils [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.583s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 607.686490] env[69328]: DEBUG nova.compute.manager [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 607.690680] env[69328]: DEBUG oslo_concurrency.lockutils [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.273s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 607.692593] env[69328]: INFO nova.compute.claims [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 607.730035] env[69328]: DEBUG nova.network.neutron [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Updating instance_info_cache with network_info: [{"id": "742f3021-311f-4b36-9507-03a493f2b49f", "address": "fa:16:3e:c1:17:c3", "network": {"id": "620f277d-ca49-41da-83a0-afb8c393e26e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1223326239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdc479a290524130b9d17e627a64b65a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap742f3021-31", "ovs_interfaceid": "742f3021-311f-4b36-9507-03a493f2b49f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.734169] env[69328]: DEBUG nova.compute.manager [req-e21dfa96-62a5-470b-8568-20f5b22c9bdc req-f7a5e30c-ed75-464e-b23b-ab40d0e93244 service nova] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Received event network-vif-plugged-742f3021-311f-4b36-9507-03a493f2b49f {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 607.734418] env[69328]: DEBUG oslo_concurrency.lockutils [req-e21dfa96-62a5-470b-8568-20f5b22c9bdc req-f7a5e30c-ed75-464e-b23b-ab40d0e93244 service nova] Acquiring lock "ed10d511-dbed-4884-8ac6-f737173f62c5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 607.734666] env[69328]: DEBUG oslo_concurrency.lockutils [req-e21dfa96-62a5-470b-8568-20f5b22c9bdc req-f7a5e30c-ed75-464e-b23b-ab40d0e93244 service nova] Lock "ed10d511-dbed-4884-8ac6-f737173f62c5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 
607.735627] env[69328]: DEBUG oslo_concurrency.lockutils [req-e21dfa96-62a5-470b-8568-20f5b22c9bdc req-f7a5e30c-ed75-464e-b23b-ab40d0e93244 service nova] Lock "ed10d511-dbed-4884-8ac6-f737173f62c5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 607.735627] env[69328]: DEBUG nova.compute.manager [req-e21dfa96-62a5-470b-8568-20f5b22c9bdc req-f7a5e30c-ed75-464e-b23b-ab40d0e93244 service nova] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] No waiting events found dispatching network-vif-plugged-742f3021-311f-4b36-9507-03a493f2b49f {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 607.735627] env[69328]: WARNING nova.compute.manager [req-e21dfa96-62a5-470b-8568-20f5b22c9bdc req-f7a5e30c-ed75-464e-b23b-ab40d0e93244 service nova] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Received unexpected event network-vif-plugged-742f3021-311f-4b36-9507-03a493f2b49f for instance with vm_state building and task_state spawning. [ 607.873443] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272688, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.905778] env[69328]: DEBUG oslo_vmware.api [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': task-3272689, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.922885] env[69328]: DEBUG oslo_vmware.api [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5278bb06-244b-14f0-ba12-35f024939039, 'name': SearchDatastore_Task, 'duration_secs': 0.036597} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.924415] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2fb7467-470b-46aa-891b-70631e16fddd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.931590] env[69328]: DEBUG oslo_vmware.api [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Waiting for the task: (returnval){ [ 607.931590] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]524b1deb-11d8-8e50-fae5-c9d55b66010f" [ 607.931590] env[69328]: _type = "Task" [ 607.931590] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.944068] env[69328]: DEBUG oslo_vmware.api [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]524b1deb-11d8-8e50-fae5-c9d55b66010f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.195380] env[69328]: DEBUG nova.compute.utils [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 608.195380] env[69328]: DEBUG nova.compute.manager [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 608.195380] env[69328]: DEBUG nova.network.neutron [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 608.237221] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Releasing lock "refresh_cache-ed10d511-dbed-4884-8ac6-f737173f62c5" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 608.239660] env[69328]: DEBUG nova.compute.manager [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Instance network_info: |[{"id": "742f3021-311f-4b36-9507-03a493f2b49f", "address": "fa:16:3e:c1:17:c3", "network": {"id": "620f277d-ca49-41da-83a0-afb8c393e26e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1223326239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdc479a290524130b9d17e627a64b65a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap742f3021-31", "ovs_interfaceid": "742f3021-311f-4b36-9507-03a493f2b49f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 608.240071] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c1:17:c3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '86a35d07-53d3-46b3-92cb-ae34236c0f41', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '742f3021-311f-4b36-9507-03a493f2b49f', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 608.249027] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Creating folder: Project (cdc479a290524130b9d17e627a64b65a). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 608.250809] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-96f6c038-c3ec-4b4b-8f99-a823804042c4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.266152] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Created folder: Project (cdc479a290524130b9d17e627a64b65a) in parent group-v653649. [ 608.266364] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Creating folder: Instances. Parent ref: group-v653672. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 608.266658] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-92e976a1-bef0-479f-92ec-d55ab087105f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.288378] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Created folder: Instances in parent group-v653672. [ 608.288758] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 608.288976] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 608.289248] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-21f95e86-e544-4bd4-9bc3-9a58f4d9f4eb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.307962] env[69328]: DEBUG nova.policy [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd8eb944069844f34b3c2154e011c591a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4c353c4bd87647548297e8b8553a48e3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 608.315017] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Acquiring lock "46526210-2783-408d-9ecb-773f33ff0c66" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 608.315339] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Lock "46526210-2783-408d-9ecb-773f33ff0c66" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 608.319648] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 608.319648] env[69328]: value = "task-3272692" [ 608.319648] env[69328]: _type = "Task" [ 608.319648] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.329651] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272692, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.373072] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272688, 'name': CreateVM_Task, 'duration_secs': 1.490673} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.373072] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 608.373072] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.373072] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 608.373376] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 608.373560] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06191a47-6be2-47f8-9399-2291fd2987c8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.379408] env[69328]: DEBUG oslo_vmware.api [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Waiting for the task: (returnval){ [ 608.379408] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52532d6f-b6eb-d97b-8fde-1cb2389432fe" [ 608.379408] env[69328]: _type = "Task" [ 608.379408] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.392862] env[69328]: DEBUG oslo_vmware.api [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52532d6f-b6eb-d97b-8fde-1cb2389432fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.407943] env[69328]: DEBUG oslo_vmware.api [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': task-3272689, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.885789} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.408242] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] caba3b5c-db15-4de6-8d3d-41f6751f1b83/caba3b5c-db15-4de6-8d3d-41f6751f1b83.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 608.408487] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 608.410032] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4c991d5b-c518-4480-871e-f2a64e86afda {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.419875] env[69328]: DEBUG oslo_vmware.api [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Waiting for the task: (returnval){ [ 608.419875] env[69328]: value = "task-3272693" [ 608.419875] env[69328]: _type = "Task" [ 608.419875] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.430858] env[69328]: DEBUG oslo_vmware.api [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': task-3272693, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.444235] env[69328]: DEBUG oslo_vmware.api [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]524b1deb-11d8-8e50-fae5-c9d55b66010f, 'name': SearchDatastore_Task, 'duration_secs': 0.017425} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.445043] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 608.445043] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8/d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 608.445187] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d9ddd4f2-8018-4d95-a178-ce4be1763b33 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.455229] env[69328]: DEBUG oslo_vmware.api [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Waiting for the task: (returnval){ [ 608.455229] env[69328]: value = "task-3272694" [ 608.455229] env[69328]: _type = "Task" [ 608.455229] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.467212] env[69328]: DEBUG oslo_vmware.api [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3272694, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.701467] env[69328]: DEBUG nova.compute.manager [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 608.793338] env[69328]: DEBUG nova.network.neutron [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Successfully created port: 369bd8af-cb0d-49c0-b41e-69689c57cc0a {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 608.836071] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272692, 'name': CreateVM_Task, 'duration_secs': 0.504255} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.836269] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 608.837342] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.905652] env[69328]: DEBUG oslo_vmware.api [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52532d6f-b6eb-d97b-8fde-1cb2389432fe, 'name': SearchDatastore_Task, 'duration_secs': 0.012252} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.905652] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 608.908908] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 608.908908] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.908908] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 608.908908] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 608.913028] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" 
{{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 608.913028] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 608.913028] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0d4b5e7e-8a0e-4dc0-804d-be793e1d5372 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.913998] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f10851c4-f8c2-4410-b6e3-3a12e54cc0c1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.926140] env[69328]: DEBUG oslo_vmware.api [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 608.926140] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5251ad94-2e1e-2a91-897e-c3f6dfc574f2" [ 608.926140] env[69328]: _type = "Task" [ 608.926140] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.938607] env[69328]: DEBUG oslo_vmware.api [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': task-3272693, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078497} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.939107] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 608.942109] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 608.944199] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 608.944599] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84bed0d6-c373-4135-9f56-71c0a473f7bf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.952518] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53535220-d355-4a46-82bc-84aec866e25d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.956335] env[69328]: DEBUG oslo_vmware.api [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5251ad94-2e1e-2a91-897e-c3f6dfc574f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.964525] env[69328]: DEBUG oslo_vmware.api [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Waiting for the task: (returnval){ [ 608.964525] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52724329-6f42-2465-31f4-e3f5cd14afea" [ 608.964525] env[69328]: _type = "Task" [ 608.964525] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.989398] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Reconfiguring VM instance instance-00000006 to attach disk [datastore1] caba3b5c-db15-4de6-8d3d-41f6751f1b83/caba3b5c-db15-4de6-8d3d-41f6751f1b83.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 608.999184] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-98e392b4-9524-4838-8715-d70bd4d630ca {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.015410] env[69328]: DEBUG oslo_vmware.api [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3272694, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.023809] env[69328]: DEBUG oslo_vmware.api [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52724329-6f42-2465-31f4-e3f5cd14afea, 'name': SearchDatastore_Task, 'duration_secs': 0.056744} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.026233] env[69328]: DEBUG oslo_vmware.api [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Waiting for the task: (returnval){ [ 609.026233] env[69328]: value = "task-3272695" [ 609.026233] env[69328]: _type = "Task" [ 609.026233] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.026517] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c50be705-0d14-4fbe-8751-fe390bb28f2f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.040370] env[69328]: DEBUG oslo_vmware.api [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Waiting for the task: (returnval){ [ 609.040370] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5233b0aa-8dcc-a064-dc8d-eb8ef6fbf2e6" [ 609.040370] env[69328]: _type = "Task" [ 609.040370] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.046199] env[69328]: DEBUG oslo_vmware.api [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': task-3272695, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.056691] env[69328]: DEBUG oslo_vmware.api [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5233b0aa-8dcc-a064-dc8d-eb8ef6fbf2e6, 'name': SearchDatastore_Task, 'duration_secs': 0.014195} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.056982] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 609.057343] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] d97dc6d5-e55f-4b9e-91e6-cfdea82f5236/d97dc6d5-e55f-4b9e-91e6-cfdea82f5236.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 609.057667] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-175c26cb-22c9-4940-88ed-349c86e51122 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.071085] env[69328]: DEBUG oslo_vmware.api [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Waiting for the task: (returnval){ [ 609.071085] env[69328]: value = "task-3272696" [ 609.071085] env[69328]: _type = "Task" [ 609.071085] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.081855] env[69328]: DEBUG oslo_vmware.api [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Task: {'id': task-3272696, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.165477] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3527007-7089-43bb-a421-1e1c1c62c6b4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.174392] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4f68509-6c09-4323-96db-f177ec46169d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.226273] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13b0ca49-55be-440b-9b09-5cfb0e66b3c8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.240493] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c355c994-4104-481e-af14-2d9ef883fcf5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.248639] env[69328]: DEBUG nova.compute.manager [req-beb4a7cd-a221-471e-921c-0428911257dd req-c3479b92-56e5-4492-a15c-96c007017610 service nova] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Received event network-vif-plugged-b801ae0c-2061-4103-8530-3d58f8785333 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 609.249141] env[69328]: DEBUG oslo_concurrency.lockutils [req-beb4a7cd-a221-471e-921c-0428911257dd req-c3479b92-56e5-4492-a15c-96c007017610 service nova] Acquiring lock "d97dc6d5-e55f-4b9e-91e6-cfdea82f5236-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 609.249141] env[69328]: DEBUG oslo_concurrency.lockutils [req-beb4a7cd-a221-471e-921c-0428911257dd req-c3479b92-56e5-4492-a15c-96c007017610 service nova] Lock "d97dc6d5-e55f-4b9e-91e6-cfdea82f5236-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 609.249267] env[69328]: DEBUG oslo_concurrency.lockutils [req-beb4a7cd-a221-471e-921c-0428911257dd req-c3479b92-56e5-4492-a15c-96c007017610 service nova] Lock "d97dc6d5-e55f-4b9e-91e6-cfdea82f5236-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 609.249303] env[69328]: DEBUG nova.compute.manager [req-beb4a7cd-a221-471e-921c-0428911257dd req-c3479b92-56e5-4492-a15c-96c007017610 service nova] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] No waiting events found dispatching network-vif-plugged-b801ae0c-2061-4103-8530-3d58f8785333 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 609.249538] env[69328]: WARNING nova.compute.manager [req-beb4a7cd-a221-471e-921c-0428911257dd req-c3479b92-56e5-4492-a15c-96c007017610 service nova] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Received unexpected event network-vif-plugged-b801ae0c-2061-4103-8530-3d58f8785333 for instance with vm_state building and task_state spawning. 
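Annotation (not part of the log): the lockutils entries just above show the per-instance event lock ("<instance-uuid>-events") being acquired and released around InstanceEvents.pop_instance_event before the external event is dispatched, and the WARNING fires because nothing was waiting for network-vif-plugged while the instance was still building/spawning. The sketch below is a minimal, hypothetical illustration of that acquire/pop/release pattern using oslo.concurrency; the waiting_events table and the waiter object are assumptions for illustration and are not Nova's actual data structures.

```python
# Illustrative sketch (not Nova's implementation): guarding a per-instance
# event table with an oslo.concurrency lock, mirroring the
# "<instance-uuid>-events" acquire/release pattern seen in the log above.
from oslo_concurrency import lockutils

# Assumed structure for illustration: {instance_uuid: {event_name: waiter}}
waiting_events = {}

def pop_instance_event(instance_uuid, event_name):
    """Return and remove the waiter for event_name, or None if nobody waits."""
    lock_name = f"{instance_uuid}-events"
    with lockutils.lock(lock_name):
        return waiting_events.get(instance_uuid, {}).pop(event_name, None)

def handle_external_event(instance_uuid, event_name):
    waiter = pop_instance_event(instance_uuid, event_name)
    if waiter is None:
        # Corresponds to the WARNING in the log: the event arrived before
        # anything registered to wait for it.
        print(f"No waiting events found dispatching {event_name}")
    else:
        waiter.set()  # hypothetical waiter API (e.g. a threading.Event)
```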
[ 609.249739] env[69328]: DEBUG nova.compute.manager [req-beb4a7cd-a221-471e-921c-0428911257dd req-c3479b92-56e5-4492-a15c-96c007017610 service nova] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Received event network-changed-b801ae0c-2061-4103-8530-3d58f8785333 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 609.249814] env[69328]: DEBUG nova.compute.manager [req-beb4a7cd-a221-471e-921c-0428911257dd req-c3479b92-56e5-4492-a15c-96c007017610 service nova] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Refreshing instance network info cache due to event network-changed-b801ae0c-2061-4103-8530-3d58f8785333. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 609.249995] env[69328]: DEBUG oslo_concurrency.lockutils [req-beb4a7cd-a221-471e-921c-0428911257dd req-c3479b92-56e5-4492-a15c-96c007017610 service nova] Acquiring lock "refresh_cache-d97dc6d5-e55f-4b9e-91e6-cfdea82f5236" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.250178] env[69328]: DEBUG oslo_concurrency.lockutils [req-beb4a7cd-a221-471e-921c-0428911257dd req-c3479b92-56e5-4492-a15c-96c007017610 service nova] Acquired lock "refresh_cache-d97dc6d5-e55f-4b9e-91e6-cfdea82f5236" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 609.250336] env[69328]: DEBUG nova.network.neutron [req-beb4a7cd-a221-471e-921c-0428911257dd req-c3479b92-56e5-4492-a15c-96c007017610 service nova] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Refreshing network info cache for port b801ae0c-2061-4103-8530-3d58f8785333 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 609.265447] env[69328]: DEBUG nova.compute.provider_tree [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 609.438564] env[69328]: DEBUG oslo_vmware.api [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5251ad94-2e1e-2a91-897e-c3f6dfc574f2, 'name': SearchDatastore_Task, 'duration_secs': 0.058981} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.438869] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 609.439120] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 609.439360] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.439539] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 609.439740] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 609.440013] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-32cfe75a-b709-4060-9992-5efb47b0ac8f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.455979] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 609.456197] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 609.456990] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47dfec4a-9072-4ddd-8d0b-3745fe4d0dfb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.466261] env[69328]: DEBUG oslo_vmware.api [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 609.466261] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52132c2e-285b-909a-aebe-2d8d0db4b91f" [ 609.466261] env[69328]: _type = "Task" [ 609.466261] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.472146] env[69328]: DEBUG oslo_vmware.api [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3272694, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.593007} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.477253] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8/d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 609.477492] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 609.477776] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4ce17a68-7738-4a34-a5c8-f44cc23afb47 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.489262] env[69328]: DEBUG oslo_vmware.api [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52132c2e-285b-909a-aebe-2d8d0db4b91f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.491368] env[69328]: DEBUG oslo_vmware.api [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Waiting for the task: (returnval){ [ 609.491368] env[69328]: value = "task-3272697" [ 609.491368] env[69328]: _type = "Task" [ 609.491368] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.505387] env[69328]: DEBUG oslo_vmware.api [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3272697, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.541451] env[69328]: DEBUG oslo_vmware.api [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': task-3272695, 'name': ReconfigVM_Task, 'duration_secs': 0.32582} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.545521] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Reconfigured VM instance instance-00000006 to attach disk [datastore1] caba3b5c-db15-4de6-8d3d-41f6751f1b83/caba3b5c-db15-4de6-8d3d-41f6751f1b83.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 609.545521] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0f8124bd-c749-4f1f-95b3-bdd795ac8c24 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.554122] env[69328]: DEBUG oslo_vmware.api [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Waiting for the task: (returnval){ [ 609.554122] env[69328]: value = "task-3272698" [ 609.554122] env[69328]: _type = "Task" [ 609.554122] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.567264] env[69328]: DEBUG oslo_vmware.api [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': task-3272698, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.582587] env[69328]: DEBUG oslo_vmware.api [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Task: {'id': task-3272696, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.595187] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Acquiring lock "e92953f4-b634-4ef9-a5ad-63a886cfa007" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 609.595511] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Lock "e92953f4-b634-4ef9-a5ad-63a886cfa007" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 609.683530] env[69328]: DEBUG nova.network.neutron [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Successfully updated port: 09c4fb65-f87f-4fdc-9a85-cf73224a3ca3 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 609.731831] env[69328]: DEBUG nova.compute.manager [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 609.769911] env[69328]: DEBUG nova.virt.hardware [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 609.770210] env[69328]: DEBUG nova.virt.hardware [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 609.770361] env[69328]: DEBUG nova.virt.hardware [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:356}} [ 609.771615] env[69328]: DEBUG nova.virt.hardware [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 609.774926] env[69328]: DEBUG nova.virt.hardware [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 609.774926] env[69328]: DEBUG nova.virt.hardware [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 609.774926] env[69328]: DEBUG nova.virt.hardware [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 609.774926] env[69328]: DEBUG nova.virt.hardware [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 609.774926] env[69328]: DEBUG nova.virt.hardware [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 609.775415] env[69328]: DEBUG nova.virt.hardware [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 609.775415] env[69328]: DEBUG nova.virt.hardware [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 609.775415] env[69328]: DEBUG nova.scheduler.client.report [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 609.781415] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e795be8-e2e7-4b36-8614-031cfdf6d5b8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.797177] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86187068-85ee-4ec7-b6d3-a342a6f6aff5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.988032] env[69328]: DEBUG oslo_vmware.api [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52132c2e-285b-909a-aebe-2d8d0db4b91f, 'name': SearchDatastore_Task, 'duration_secs': 0.042811} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.988800] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-158c70d8-1cb0-4145-b5c0-5edff7d8cbdd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.000073] env[69328]: DEBUG oslo_vmware.api [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 610.000073] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]525ee452-1ca3-7a3d-d28f-d21218e6dc50" [ 610.000073] env[69328]: _type = "Task" [ 610.000073] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.007670] env[69328]: DEBUG oslo_vmware.api [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3272697, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074966} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.010837] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 610.014773] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d5b6078-09bd-4897-a09f-2d504d627ade {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.025891] env[69328]: DEBUG oslo_vmware.api [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]525ee452-1ca3-7a3d-d28f-d21218e6dc50, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.050243] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Reconfiguring VM instance instance-00000007 to attach disk [datastore1] d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8/d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 610.051155] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4b43d90d-5b80-4ba8-ae8f-cf77645b45d6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.084958] env[69328]: DEBUG oslo_vmware.api [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': task-3272698, 'name': Rename_Task, 'duration_secs': 0.175936} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.090159] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 610.090517] env[69328]: DEBUG oslo_vmware.api [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Waiting for the task: (returnval){ [ 610.090517] env[69328]: value = "task-3272699" [ 610.090517] env[69328]: _type = "Task" [ 610.090517] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.090715] env[69328]: DEBUG oslo_vmware.api [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Task: {'id': task-3272696, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.966815} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.091563] env[69328]: DEBUG nova.network.neutron [req-beb4a7cd-a221-471e-921c-0428911257dd req-c3479b92-56e5-4492-a15c-96c007017610 service nova] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Updated VIF entry in instance network info cache for port b801ae0c-2061-4103-8530-3d58f8785333. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 610.091878] env[69328]: DEBUG nova.network.neutron [req-beb4a7cd-a221-471e-921c-0428911257dd req-c3479b92-56e5-4492-a15c-96c007017610 service nova] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Updating instance_info_cache with network_info: [{"id": "b801ae0c-2061-4103-8530-3d58f8785333", "address": "fa:16:3e:09:6a:91", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.186", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb801ae0c-20", "ovs_interfaceid": "b801ae0c-2061-4103-8530-3d58f8785333", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 610.093414] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-77fc619f-4302-4e72-a179-2822bc98d2d1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.095502] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] d97dc6d5-e55f-4b9e-91e6-cfdea82f5236/d97dc6d5-e55f-4b9e-91e6-cfdea82f5236.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 610.095626] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 610.099379] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c8e5bf11-5801-43a7-ace3-38f5a5d47729 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.108199] env[69328]: DEBUG oslo_vmware.api [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3272699, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.112519] env[69328]: DEBUG oslo_vmware.api [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Waiting for the task: (returnval){ [ 610.112519] env[69328]: value = "task-3272700" [ 610.112519] env[69328]: _type = "Task" [ 610.112519] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.112768] env[69328]: DEBUG oslo_vmware.api [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Waiting for the task: (returnval){ [ 610.112768] env[69328]: value = "task-3272701" [ 610.112768] env[69328]: _type = "Task" [ 610.112768] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.133768] env[69328]: DEBUG oslo_vmware.api [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': task-3272700, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.134045] env[69328]: DEBUG oslo_vmware.api [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Task: {'id': task-3272701, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.189941] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Acquiring lock "refresh_cache-d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 610.190252] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Acquired lock "refresh_cache-d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 610.190540] env[69328]: DEBUG nova.network.neutron [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 610.266216] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Acquiring lock "9753734d-90f0-4661-8029-ec312e88eb60" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 610.266474] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 
tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Lock "9753734d-90f0-4661-8029-ec312e88eb60" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 610.286944] env[69328]: DEBUG oslo_concurrency.lockutils [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.596s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 610.287536] env[69328]: DEBUG nova.compute.manager [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 610.290904] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 5.099s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 610.458292] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aec69d4d-dbfb-4864-9814-8fe245f37e57 tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Acquiring lock "49a668a7-5967-46a9-823f-7f613d34d152" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 610.458655] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aec69d4d-dbfb-4864-9814-8fe245f37e57 tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Lock "49a668a7-5967-46a9-823f-7f613d34d152" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 610.458899] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aec69d4d-dbfb-4864-9814-8fe245f37e57 tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Acquiring lock "49a668a7-5967-46a9-823f-7f613d34d152-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 610.459119] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aec69d4d-dbfb-4864-9814-8fe245f37e57 tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Lock "49a668a7-5967-46a9-823f-7f613d34d152-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 610.459303] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aec69d4d-dbfb-4864-9814-8fe245f37e57 tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Lock 
"49a668a7-5967-46a9-823f-7f613d34d152-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 610.462708] env[69328]: INFO nova.compute.manager [None req-aec69d4d-dbfb-4864-9814-8fe245f37e57 tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Terminating instance [ 610.518035] env[69328]: DEBUG oslo_vmware.api [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]525ee452-1ca3-7a3d-d28f-d21218e6dc50, 'name': SearchDatastore_Task, 'duration_secs': 0.029602} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.518319] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 610.518575] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] ed10d511-dbed-4884-8ac6-f737173f62c5/ed10d511-dbed-4884-8ac6-f737173f62c5.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 610.518880] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1f7af49a-1cd8-44df-a0aa-f1c4c2ccc539 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.532517] env[69328]: DEBUG oslo_vmware.api [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 610.532517] env[69328]: value = "task-3272702" [ 610.532517] env[69328]: _type = "Task" [ 610.532517] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.545814] env[69328]: DEBUG oslo_vmware.api [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3272702, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.603993] env[69328]: DEBUG oslo_concurrency.lockutils [req-beb4a7cd-a221-471e-921c-0428911257dd req-c3479b92-56e5-4492-a15c-96c007017610 service nova] Releasing lock "refresh_cache-d97dc6d5-e55f-4b9e-91e6-cfdea82f5236" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 610.613876] env[69328]: DEBUG oslo_vmware.api [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3272699, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.629964] env[69328]: DEBUG oslo_vmware.api [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': task-3272700, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.634977] env[69328]: DEBUG oslo_vmware.api [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Task: {'id': task-3272701, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077441} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.635234] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 610.637253] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f759d384-c8a6-4998-ba11-a98b7cb84389 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.694058] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Reconfiguring VM instance instance-00000008 to attach disk [datastore1] d97dc6d5-e55f-4b9e-91e6-cfdea82f5236/d97dc6d5-e55f-4b9e-91e6-cfdea82f5236.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 610.697938] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4dffdfd2-07ee-4efd-bf5e-bbffdc8f371a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.724185] env[69328]: DEBUG oslo_vmware.api [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Waiting for the task: (returnval){ [ 610.724185] env[69328]: value = "task-3272703" [ 610.724185] env[69328]: _type = "Task" [ 610.724185] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.736175] env[69328]: DEBUG oslo_vmware.api [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Task: {'id': task-3272703, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.785295] env[69328]: DEBUG nova.network.neutron [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 610.793992] env[69328]: DEBUG nova.compute.utils [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 610.795636] env[69328]: DEBUG nova.compute.manager [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Not allocating networking since 'none' was specified. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 610.967085] env[69328]: DEBUG nova.compute.manager [None req-aec69d4d-dbfb-4864-9814-8fe245f37e57 tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 610.967982] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-aec69d4d-dbfb-4864-9814-8fe245f37e57 tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 610.968724] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-459dc7b9-cbb8-4c28-9cb6-646b4ffb57cc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.979623] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-aec69d4d-dbfb-4864-9814-8fe245f37e57 tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 610.979623] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ca7b32e1-3a16-41e0-b12e-e79cbf800050 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.987639] env[69328]: DEBUG oslo_vmware.api [None req-aec69d4d-dbfb-4864-9814-8fe245f37e57 tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Waiting for the task: (returnval){ [ 610.987639] env[69328]: value = "task-3272704" [ 610.987639] env[69328]: _type = "Task" [ 610.987639] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.999924] env[69328]: DEBUG oslo_vmware.api [None req-aec69d4d-dbfb-4864-9814-8fe245f37e57 tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Task: {'id': task-3272704, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.049913] env[69328]: DEBUG oslo_vmware.api [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3272702, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.108763] env[69328]: DEBUG oslo_vmware.api [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3272699, 'name': ReconfigVM_Task, 'duration_secs': 0.928318} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.109098] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Reconfigured VM instance instance-00000007 to attach disk [datastore1] d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8/d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 611.109920] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-79936ace-24ec-40b7-b182-69ce1cc61eed {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.120540] env[69328]: DEBUG oslo_vmware.api [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Waiting for the task: (returnval){ [ 611.120540] env[69328]: value = "task-3272705" [ 611.120540] env[69328]: _type = "Task" [ 611.120540] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.139562] env[69328]: DEBUG oslo_vmware.api [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': task-3272700, 'name': PowerOnVM_Task, 'duration_secs': 0.561671} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.145380] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 611.145682] env[69328]: INFO nova.compute.manager [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Took 13.71 seconds to spawn the instance on the hypervisor. 
[ 611.147301] env[69328]: DEBUG nova.compute.manager [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 611.147901] env[69328]: DEBUG oslo_vmware.api [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3272705, 'name': Rename_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.149369] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a3408e2-877e-47ba-97fd-fad3714a3240 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.154630] env[69328]: DEBUG nova.network.neutron [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Updating instance_info_cache with network_info: [{"id": "09c4fb65-f87f-4fdc-9a85-cf73224a3ca3", "address": "fa:16:3e:e6:08:a2", "network": {"id": "b7b15f77-0584-4f19-a05e-67df3efe1b9d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-778653716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8502178b3d334c338b63dfde3eae8f08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d275d7c6-2a7b-4ee8-b6f4-fabf1ba1905f", "external-id": "nsx-vlan-transportzone-513", "segmentation_id": 513, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09c4fb65-f8", "ovs_interfaceid": "09c4fb65-f87f-4fdc-9a85-cf73224a3ca3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 611.217117] env[69328]: DEBUG oslo_concurrency.lockutils [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Acquiring lock "15a8de08-4d20-4329-9867-53e5dff82878" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 611.217667] env[69328]: DEBUG oslo_concurrency.lockutils [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Lock "15a8de08-4d20-4329-9867-53e5dff82878" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 611.227835] env[69328]: DEBUG nova.network.neutron [None 
req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Successfully updated port: 369bd8af-cb0d-49c0-b41e-69689c57cc0a {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 611.245470] env[69328]: DEBUG oslo_vmware.api [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Task: {'id': task-3272703, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.297571] env[69328]: DEBUG nova.compute.manager [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 611.335084] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 88f9f0c2-0c55-45bf-a494-8f1ee4922443 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 611.335462] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 676173ee-8001-48c6-bd28-09130f6dd99a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 611.335462] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 49a668a7-5967-46a9-823f-7f613d34d152 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 611.335616] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 230c6278-65af-4f5d-b817-0b695086c29d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 611.335686] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 50b84adc-5ff3-4a1e-a09f-5c96daef9b87 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 611.335746] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 611.335882] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance caba3b5c-db15-4de6-8d3d-41f6751f1b83 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 611.336017] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance d97dc6d5-e55f-4b9e-91e6-cfdea82f5236 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 611.336160] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance ed10d511-dbed-4884-8ac6-f737173f62c5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 611.336368] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 611.336586] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance a798c3f2-ccde-488e-8a14-21f4a04f8e12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 611.336648] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 26feb2d1-ff64-4a13-af83-b6d5fe4348e1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 611.499268] env[69328]: DEBUG oslo_vmware.api [None req-aec69d4d-dbfb-4864-9814-8fe245f37e57 tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Task: {'id': task-3272704, 'name': PowerOffVM_Task, 'duration_secs': 0.263119} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.499584] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-aec69d4d-dbfb-4864-9814-8fe245f37e57 tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 611.499792] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-aec69d4d-dbfb-4864-9814-8fe245f37e57 tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 611.500422] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5eb15569-a7b5-4610-bac5-7fd445e190ed {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.545454] env[69328]: DEBUG oslo_vmware.api [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3272702, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.870656} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.545728] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] ed10d511-dbed-4884-8ac6-f737173f62c5/ed10d511-dbed-4884-8ac6-f737173f62c5.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 611.545945] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 611.546273] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dabe3efd-7901-4a05-b29a-f82285702684 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.556538] env[69328]: DEBUG oslo_vmware.api [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 611.556538] env[69328]: value = "task-3272707" [ 611.556538] env[69328]: _type = "Task" [ 611.556538] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.567706] env[69328]: DEBUG oslo_vmware.api [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3272707, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.614024] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-aec69d4d-dbfb-4864-9814-8fe245f37e57 tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 611.614024] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-aec69d4d-dbfb-4864-9814-8fe245f37e57 tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 611.614024] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-aec69d4d-dbfb-4864-9814-8fe245f37e57 tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Deleting the datastore file [datastore2] 49a668a7-5967-46a9-823f-7f613d34d152 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 611.614024] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-19a01b4d-78c6-4d49-9a31-8e788ee839f3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.622383] env[69328]: DEBUG nova.compute.manager [req-3b36db6b-cd33-4855-a89c-743b34bc6ee9 req-ca8c27ca-d815-4a66-a3ec-e9e789b4ed6b service nova] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Received event network-changed-742f3021-311f-4b36-9507-03a493f2b49f {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 611.622712] env[69328]: DEBUG nova.compute.manager [req-3b36db6b-cd33-4855-a89c-743b34bc6ee9 req-ca8c27ca-d815-4a66-a3ec-e9e789b4ed6b service nova] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Refreshing instance network info cache due to event network-changed-742f3021-311f-4b36-9507-03a493f2b49f. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 611.624109] env[69328]: DEBUG oslo_concurrency.lockutils [req-3b36db6b-cd33-4855-a89c-743b34bc6ee9 req-ca8c27ca-d815-4a66-a3ec-e9e789b4ed6b service nova] Acquiring lock "refresh_cache-ed10d511-dbed-4884-8ac6-f737173f62c5" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 611.624109] env[69328]: DEBUG oslo_concurrency.lockutils [req-3b36db6b-cd33-4855-a89c-743b34bc6ee9 req-ca8c27ca-d815-4a66-a3ec-e9e789b4ed6b service nova] Acquired lock "refresh_cache-ed10d511-dbed-4884-8ac6-f737173f62c5" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 611.624678] env[69328]: DEBUG nova.network.neutron [req-3b36db6b-cd33-4855-a89c-743b34bc6ee9 req-ca8c27ca-d815-4a66-a3ec-e9e789b4ed6b service nova] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Refreshing network info cache for port 742f3021-311f-4b36-9507-03a493f2b49f {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 611.635451] env[69328]: DEBUG oslo_vmware.api [None req-aec69d4d-dbfb-4864-9814-8fe245f37e57 tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Waiting for the task: (returnval){ [ 611.635451] env[69328]: value = "task-3272708" [ 611.635451] env[69328]: _type = "Task" [ 611.635451] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.645900] env[69328]: DEBUG oslo_vmware.api [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3272705, 'name': Rename_Task, 'duration_secs': 0.414628} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.646484] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 611.646661] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6289ac45-aba4-4fe6-824f-fa918ed3af67 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.654606] env[69328]: DEBUG oslo_vmware.api [None req-aec69d4d-dbfb-4864-9814-8fe245f37e57 tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Task: {'id': task-3272708, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.657698] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Releasing lock "refresh_cache-d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 611.661072] env[69328]: DEBUG nova.compute.manager [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Instance network_info: |[{"id": "09c4fb65-f87f-4fdc-9a85-cf73224a3ca3", "address": "fa:16:3e:e6:08:a2", "network": {"id": "b7b15f77-0584-4f19-a05e-67df3efe1b9d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-778653716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8502178b3d334c338b63dfde3eae8f08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d275d7c6-2a7b-4ee8-b6f4-fabf1ba1905f", "external-id": "nsx-vlan-transportzone-513", "segmentation_id": 513, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09c4fb65-f8", "ovs_interfaceid": "09c4fb65-f87f-4fdc-9a85-cf73224a3ca3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 611.661406] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:08:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd275d7c6-2a7b-4ee8-b6f4-fabf1ba1905f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '09c4fb65-f87f-4fdc-9a85-cf73224a3ca3', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 611.667055] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Creating folder: Project (8502178b3d334c338b63dfde3eae8f08). Parent ref: group-v653649. 
{{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 611.669019] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-89cbd88b-de91-4714-a434-023991065611 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.670936] env[69328]: DEBUG oslo_vmware.api [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Waiting for the task: (returnval){ [ 611.670936] env[69328]: value = "task-3272709" [ 611.670936] env[69328]: _type = "Task" [ 611.670936] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.684974] env[69328]: INFO nova.compute.manager [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Took 28.01 seconds to build instance. [ 611.687486] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Created folder: Project (8502178b3d334c338b63dfde3eae8f08) in parent group-v653649. [ 611.688053] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Creating folder: Instances. Parent ref: group-v653675. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 611.692053] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7c4c9c95-60c9-42bc-b880-b745383c5610 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.694429] env[69328]: DEBUG oslo_vmware.api [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3272709, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.705480] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Created folder: Instances in parent group-v653675. [ 611.705809] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 611.705961] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 611.706161] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e60a8454-0b9b-45fa-a30c-87fb5858bedc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.729977] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 611.729977] env[69328]: value = "task-3272712" [ 611.729977] env[69328]: _type = "Task" [ 611.729977] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.734098] env[69328]: DEBUG oslo_concurrency.lockutils [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Acquiring lock "refresh_cache-a798c3f2-ccde-488e-8a14-21f4a04f8e12" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 611.735238] env[69328]: DEBUG oslo_concurrency.lockutils [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Acquired lock "refresh_cache-a798c3f2-ccde-488e-8a14-21f4a04f8e12" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 611.735415] env[69328]: DEBUG nova.network.neutron [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 611.751247] env[69328]: DEBUG oslo_vmware.api [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Task: {'id': task-3272703, 'name': ReconfigVM_Task, 'duration_secs': 0.673282} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.751447] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272712, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.752519] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Reconfigured VM instance instance-00000008 to attach disk [datastore1] d97dc6d5-e55f-4b9e-91e6-cfdea82f5236/d97dc6d5-e55f-4b9e-91e6-cfdea82f5236.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 611.753222] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4a7b902a-b818-40a1-9a31-751eb4d295b5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.762202] env[69328]: DEBUG oslo_vmware.api [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Waiting for the task: (returnval){ [ 611.762202] env[69328]: value = "task-3272713" [ 611.762202] env[69328]: _type = "Task" [ 611.762202] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.776982] env[69328]: DEBUG oslo_vmware.api [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Task: {'id': task-3272713, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.840864] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance edb1a21a-6907-4198-a977-c1213e8fecc0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 612.074332] env[69328]: DEBUG oslo_vmware.api [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3272707, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074171} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.074332] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 612.075853] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09ec256c-6bb9-401d-bfd6-61976e66ea26 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.102724] env[69328]: DEBUG oslo_concurrency.lockutils [None req-423c7c80-1271-4098-b5b6-1229ad381bd9 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquiring lock "50b84adc-5ff3-4a1e-a09f-5c96daef9b87" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 612.103188] env[69328]: DEBUG oslo_concurrency.lockutils [None req-423c7c80-1271-4098-b5b6-1229ad381bd9 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lock "50b84adc-5ff3-4a1e-a09f-5c96daef9b87" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 612.103472] env[69328]: DEBUG oslo_concurrency.lockutils [None req-423c7c80-1271-4098-b5b6-1229ad381bd9 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquiring lock "50b84adc-5ff3-4a1e-a09f-5c96daef9b87-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 612.103740] env[69328]: DEBUG oslo_concurrency.lockutils [None req-423c7c80-1271-4098-b5b6-1229ad381bd9 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lock "50b84adc-5ff3-4a1e-a09f-5c96daef9b87-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 612.104203] env[69328]: DEBUG oslo_concurrency.lockutils [None req-423c7c80-1271-4098-b5b6-1229ad381bd9 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lock "50b84adc-5ff3-4a1e-a09f-5c96daef9b87-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 612.125188] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Reconfiguring VM instance instance-00000009 to attach disk [datastore1] ed10d511-dbed-4884-8ac6-f737173f62c5/ed10d511-dbed-4884-8ac6-f737173f62c5.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm 
/opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 612.125942] env[69328]: INFO nova.compute.manager [None req-423c7c80-1271-4098-b5b6-1229ad381bd9 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Terminating instance [ 612.128052] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d09a61cb-fb73-497c-b7a5-f86a4c4d57d7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.164446] env[69328]: DEBUG oslo_vmware.api [None req-aec69d4d-dbfb-4864-9814-8fe245f37e57 tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Task: {'id': task-3272708, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.241779} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.167065] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-aec69d4d-dbfb-4864-9814-8fe245f37e57 tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 612.167343] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-aec69d4d-dbfb-4864-9814-8fe245f37e57 tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 612.167560] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-aec69d4d-dbfb-4864-9814-8fe245f37e57 tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 612.167831] env[69328]: INFO nova.compute.manager [None req-aec69d4d-dbfb-4864-9814-8fe245f37e57 tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Took 1.20 seconds to destroy the instance on the hypervisor. [ 612.168202] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aec69d4d-dbfb-4864-9814-8fe245f37e57 tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 612.168860] env[69328]: DEBUG oslo_vmware.api [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 612.168860] env[69328]: value = "task-3272714" [ 612.168860] env[69328]: _type = "Task" [ 612.168860] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.169868] env[69328]: DEBUG nova.compute.manager [-] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 612.169868] env[69328]: DEBUG nova.network.neutron [-] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 612.189403] env[69328]: DEBUG oslo_concurrency.lockutils [None req-124f4d74-4256-464c-abdd-d0cb3e48534d tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Lock "caba3b5c-db15-4de6-8d3d-41f6751f1b83" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.525s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 612.190065] env[69328]: DEBUG oslo_vmware.api [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3272714, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.194736] env[69328]: DEBUG oslo_vmware.api [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3272709, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.245718] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272712, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.274156] env[69328]: DEBUG oslo_vmware.api [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Task: {'id': task-3272713, 'name': Rename_Task, 'duration_secs': 0.179594} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.274553] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 612.275055] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6786906d-beca-43e7-83b4-3aaec4a19323 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.284518] env[69328]: DEBUG oslo_vmware.api [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Waiting for the task: (returnval){ [ 612.284518] env[69328]: value = "task-3272715" [ 612.284518] env[69328]: _type = "Task" [ 612.284518] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.294881] env[69328]: DEBUG oslo_vmware.api [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Task: {'id': task-3272715, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.312421] env[69328]: DEBUG nova.compute.manager [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 612.320271] env[69328]: DEBUG nova.network.neutron [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 612.347304] env[69328]: DEBUG nova.virt.hardware [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 612.347537] env[69328]: DEBUG nova.virt.hardware [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 612.347707] env[69328]: DEBUG nova.virt.hardware [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 612.347947] env[69328]: DEBUG nova.virt.hardware [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 612.348402] env[69328]: DEBUG nova.virt.hardware [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 612.348700] env[69328]: DEBUG nova.virt.hardware [None 
req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 612.348946] env[69328]: DEBUG nova.virt.hardware [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 612.349125] env[69328]: DEBUG nova.virt.hardware [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 612.349305] env[69328]: DEBUG nova.virt.hardware [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 612.349550] env[69328]: DEBUG nova.virt.hardware [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 612.350585] env[69328]: DEBUG nova.virt.hardware [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 612.350585] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 612.352830] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5c44007-0012-4e91-a80a-b4971b43bf3f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.365669] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffb19266-ba1d-4655-b3c9-2931172f6d46 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.385060] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Instance VIF info [] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 612.391669] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Creating folder: Project (09f19079aaa04ae39c355186f12cddf6). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 612.392841] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-99047de5-ff82-45bd-9aac-be59c4b7c4f9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.409409] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Created folder: Project (09f19079aaa04ae39c355186f12cddf6) in parent group-v653649. [ 612.411426] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Creating folder: Instances. Parent ref: group-v653678. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 612.411426] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-05b16b1c-988a-4ca4-a259-d4709ff0dafd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.426039] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Created folder: Instances in parent group-v653678. [ 612.426178] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 612.426362] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 612.426614] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8e77d291-3300-49f7-8a32-81879daeb01e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.449665] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 612.449665] env[69328]: value = "task-3272718" [ 612.449665] env[69328]: _type = "Task" [ 612.449665] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.463252] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272718, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.653839] env[69328]: DEBUG nova.compute.manager [None req-423c7c80-1271-4098-b5b6-1229ad381bd9 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 612.654682] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-423c7c80-1271-4098-b5b6-1229ad381bd9 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 612.655634] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57be6d8c-b448-4689-92cc-709613213575 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.666097] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-423c7c80-1271-4098-b5b6-1229ad381bd9 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 612.668936] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ad6f3d18-cd06-4caa-ba66-813eb8d280ea {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.673515] env[69328]: DEBUG nova.compute.manager [req-1e5dbb3a-bad6-4570-8d8b-7f4aca583dc8 req-c743623b-20bf-4e60-8f14-cafce5382c18 service nova] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Received event network-changed-f00ec7b2-0d01-4e8c-b30b-50314520c094 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 612.673720] env[69328]: DEBUG nova.compute.manager [req-1e5dbb3a-bad6-4570-8d8b-7f4aca583dc8 req-c743623b-20bf-4e60-8f14-cafce5382c18 service nova] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Refreshing instance network info cache due to event network-changed-f00ec7b2-0d01-4e8c-b30b-50314520c094. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 612.674433] env[69328]: DEBUG oslo_concurrency.lockutils [req-1e5dbb3a-bad6-4570-8d8b-7f4aca583dc8 req-c743623b-20bf-4e60-8f14-cafce5382c18 service nova] Acquiring lock "refresh_cache-88f9f0c2-0c55-45bf-a494-8f1ee4922443" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.674433] env[69328]: DEBUG oslo_concurrency.lockutils [req-1e5dbb3a-bad6-4570-8d8b-7f4aca583dc8 req-c743623b-20bf-4e60-8f14-cafce5382c18 service nova] Acquired lock "refresh_cache-88f9f0c2-0c55-45bf-a494-8f1ee4922443" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 612.674433] env[69328]: DEBUG nova.network.neutron [req-1e5dbb3a-bad6-4570-8d8b-7f4aca583dc8 req-c743623b-20bf-4e60-8f14-cafce5382c18 service nova] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Refreshing network info cache for port f00ec7b2-0d01-4e8c-b30b-50314520c094 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 612.694887] env[69328]: DEBUG oslo_vmware.api [None req-423c7c80-1271-4098-b5b6-1229ad381bd9 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for the task: (returnval){ [ 612.694887] env[69328]: value = "task-3272719" [ 612.694887] env[69328]: _type = "Task" [ 612.694887] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.696569] env[69328]: DEBUG nova.compute.manager [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 612.716223] env[69328]: DEBUG oslo_vmware.api [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3272709, 'name': PowerOnVM_Task, 'duration_secs': 0.697585} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.716223] env[69328]: DEBUG oslo_vmware.api [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3272714, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.716223] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 612.716223] env[69328]: INFO nova.compute.manager [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Took 12.86 seconds to spawn the instance on the hypervisor. 
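The recurring "Waiting for the task: (returnval){ ... }", "progress is N%", and "completed successfully" records above come from the driver polling vCenter tasks (CreateVM_Task, PowerOffVM_Task, ReconfigVM_Task, ...) until they finish. Below is a minimal, self-contained sketch of that poll loop; the `TaskInfo` class, `get_task_info` callable, and field names are illustrative stand-ins, not the actual oslo.vmware API.

```python
import time

class TaskInfo:
    """Fake task snapshot: just enough structure to show the poll loop."""
    def __init__(self, state, progress=0, error=None):
        self.state = state          # 'running' | 'success' | 'error'
        self.progress = progress    # the "progress is N%" value seen in the log
        self.error = error

def wait_for_task(get_task_info, poll_interval=0.5, timeout=300):
    """Poll a task until it succeeds, fails, or times out."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == 'success':
            return info
        if info.state == 'error':
            raise RuntimeError(f"task failed: {info.error}")
        # Counterpart of the "_poll_task ... progress is N%" debug lines.
        print(f"progress is {info.progress}%")
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete in time")

# Usage: simulate a CreateVM_Task that finishes on the third poll.
_states = iter([TaskInfo('running', 0), TaskInfo('running', 66), TaskInfo('success', 100)])
wait_for_task(lambda: next(_states), poll_interval=0)
```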
[ 612.716482] env[69328]: DEBUG nova.compute.manager [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 612.717431] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68d3bb15-e4c3-4907-b3a8-6ec303f45572 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.725097] env[69328]: DEBUG oslo_vmware.api [None req-423c7c80-1271-4098-b5b6-1229ad381bd9 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3272719, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.749141] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272712, 'name': CreateVM_Task, 'duration_secs': 0.516264} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.749948] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 612.750363] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.750582] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 612.750929] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 612.751228] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81394f37-7448-41b5-8195-b68dcfad853c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.759600] env[69328]: DEBUG oslo_vmware.api [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Waiting for the task: (returnval){ [ 612.759600] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c73f21-1a84-683f-e193-845d34fe915a" [ 612.759600] env[69328]: _type = "Task" [ 612.759600] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.774509] env[69328]: DEBUG oslo_vmware.api [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c73f21-1a84-683f-e193-845d34fe915a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.793619] env[69328]: DEBUG nova.network.neutron [req-3b36db6b-cd33-4855-a89c-743b34bc6ee9 req-ca8c27ca-d815-4a66-a3ec-e9e789b4ed6b service nova] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Updated VIF entry in instance network info cache for port 742f3021-311f-4b36-9507-03a493f2b49f. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 612.793780] env[69328]: DEBUG nova.network.neutron [req-3b36db6b-cd33-4855-a89c-743b34bc6ee9 req-ca8c27ca-d815-4a66-a3ec-e9e789b4ed6b service nova] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Updating instance_info_cache with network_info: [{"id": "742f3021-311f-4b36-9507-03a493f2b49f", "address": "fa:16:3e:c1:17:c3", "network": {"id": "620f277d-ca49-41da-83a0-afb8c393e26e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1223326239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdc479a290524130b9d17e627a64b65a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap742f3021-31", "ovs_interfaceid": "742f3021-311f-4b36-9507-03a493f2b49f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 612.813126] env[69328]: DEBUG oslo_vmware.api [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Task: {'id': task-3272715, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.857440] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance f428f9a9-d792-4c1c-b2d4-ea066cc09d67 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 612.962149] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272718, 'name': CreateVM_Task, 'duration_secs': 0.393884} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.962332] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 612.962760] env[69328]: DEBUG oslo_concurrency.lockutils [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 613.087895] env[69328]: DEBUG nova.network.neutron [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Updating instance_info_cache with network_info: [{"id": "369bd8af-cb0d-49c0-b41e-69689c57cc0a", "address": "fa:16:3e:d2:78:c0", "network": {"id": "f5e5a30f-d08e-46d6-9b1f-18d5e4410095", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-967086728-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4c353c4bd87647548297e8b8553a48e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d377d75-3add-4a15-8691-74b2eb010924", "external-id": "nsx-vlan-transportzone-71", "segmentation_id": 71, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap369bd8af-cb", "ovs_interfaceid": "369bd8af-cb0d-49c0-b41e-69689c57cc0a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 613.198975] env[69328]: DEBUG oslo_vmware.api [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3272714, 'name': ReconfigVM_Task, 'duration_secs': 0.643935} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.203801] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Reconfigured VM instance instance-00000009 to attach disk [datastore1] ed10d511-dbed-4884-8ac6-f737173f62c5/ed10d511-dbed-4884-8ac6-f737173f62c5.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 613.209518] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5c49aba9-06c3-47ca-bab2-52cba56630c2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.229327] env[69328]: DEBUG oslo_vmware.api [None req-423c7c80-1271-4098-b5b6-1229ad381bd9 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3272719, 'name': PowerOffVM_Task, 'duration_secs': 0.382235} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.230241] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-423c7c80-1271-4098-b5b6-1229ad381bd9 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 613.230460] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-423c7c80-1271-4098-b5b6-1229ad381bd9 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 613.230834] env[69328]: DEBUG oslo_vmware.api [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 613.230834] env[69328]: value = "task-3272720" [ 613.230834] env[69328]: _type = "Task" [ 613.230834] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.231057] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0b2f84d4-a769-4c09-a1dd-063ce12dba46 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.236577] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 613.251923] env[69328]: INFO nova.compute.manager [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Took 28.63 seconds to build instance. 
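The "Acquiring lock", "Acquired lock", "Acquired external semaphore", and "Releasing lock" records around "[datastore1] devstack-image-cache_base/a6ec8ae9-..." show named locks serializing access to the shared image cache while several instances spawn from the same image. The sketch below reproduces that pattern with the standard library only; the lock registry and helper name are illustrative, not the oslo.concurrency lockutils implementation.

```python
import threading
from collections import defaultdict
from contextlib import contextmanager

# Process-local registry of named locks, roughly the behaviour the
# "Acquiring/Acquired/Releasing lock" log lines correspond to.
_locks = defaultdict(threading.Lock)
_registry_guard = threading.Lock()

@contextmanager
def named_lock(name):
    with _registry_guard:
        lock = _locks[name]
    print(f'Acquiring lock "{name}"')
    with lock:
        print(f'Acquired lock "{name}"')
        yield
    print(f'Lock "{name}" released')

# Usage: two workers spawning from the same cached image contend on the
# same lock name, so only one touches the cache entry at a time.
cache_entry = "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318"

def spawn_from_cache(worker_id):
    with named_lock(cache_entry):
        print(f"worker {worker_id}: processing cached image")

threads = [threading.Thread(target=spawn_from_cache, args=(i,)) for i in range(2)]
for t in threads:
    t.start()
for t in threads:
    t.join()
```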
[ 613.262082] env[69328]: DEBUG oslo_vmware.api [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3272720, 'name': Rename_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.273990] env[69328]: DEBUG oslo_vmware.api [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c73f21-1a84-683f-e193-845d34fe915a, 'name': SearchDatastore_Task, 'duration_secs': 0.019369} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.275116] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 613.275412] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 613.275691] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 613.275912] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 613.276211] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 613.276556] env[69328]: DEBUG oslo_concurrency.lockutils [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 613.276972] env[69328]: DEBUG oslo_concurrency.lockutils [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Acquired external semaphore "[datastore1] 
devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 613.277537] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-af3ca439-d3ff-49ee-a8a0-c2d941260b32 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.283733] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e688b8cf-154b-44e2-b172-62b24c5dc67b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.304167] env[69328]: DEBUG oslo_concurrency.lockutils [req-3b36db6b-cd33-4855-a89c-743b34bc6ee9 req-ca8c27ca-d815-4a66-a3ec-e9e789b4ed6b service nova] Releasing lock "refresh_cache-ed10d511-dbed-4884-8ac6-f737173f62c5" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 613.304167] env[69328]: DEBUG oslo_vmware.api [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Waiting for the task: (returnval){ [ 613.304167] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5279d47f-1fe1-c219-0b39-286c6050618a" [ 613.304167] env[69328]: _type = "Task" [ 613.304167] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.315587] env[69328]: DEBUG oslo_vmware.api [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5279d47f-1fe1-c219-0b39-286c6050618a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.320567] env[69328]: DEBUG oslo_vmware.api [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Task: {'id': task-3272715, 'name': PowerOnVM_Task, 'duration_secs': 0.647227} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.320953] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 613.321183] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 613.322420] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 613.322592] env[69328]: INFO nova.compute.manager [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Took 11.08 seconds to spawn the instance on the hypervisor. [ 613.322826] env[69328]: DEBUG nova.compute.manager [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 613.323877] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-169ce74f-2030-47d7-af7a-ea353f87ba83 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.329250] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5432456-d030-45e3-ab03-87b81e70581e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.334733] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-423c7c80-1271-4098-b5b6-1229ad381bd9 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 613.334983] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-423c7c80-1271-4098-b5b6-1229ad381bd9 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 613.335252] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-423c7c80-1271-4098-b5b6-1229ad381bd9 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Deleting the datastore file [datastore2] 50b84adc-5ff3-4a1e-a09f-5c96daef9b87 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 613.336941] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-56560c28-f980-480a-9e1c-5a81934276c2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.339998] env[69328]: DEBUG oslo_vmware.api [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Waiting for the task: (returnval){ [ 613.339998] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]525f3ce4-90fc-14a9-8893-0114873a6d7e" [ 613.339998] env[69328]: _type = "Task" [ 613.339998] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.351036] env[69328]: DEBUG oslo_vmware.api [None req-423c7c80-1271-4098-b5b6-1229ad381bd9 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for the task: (returnval){ [ 613.351036] env[69328]: value = "task-3272722" [ 613.351036] env[69328]: _type = "Task" [ 613.351036] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.360750] env[69328]: DEBUG oslo_vmware.api [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]525f3ce4-90fc-14a9-8893-0114873a6d7e, 'name': SearchDatastore_Task, 'duration_secs': 0.013014} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.362245] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 7b348a95-3ab2-4112-87e3-b17504c0a302 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 613.369162] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-554ce752-ca51-4b97-9df3-1ae218025ff1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.371131] env[69328]: DEBUG oslo_vmware.api [None req-423c7c80-1271-4098-b5b6-1229ad381bd9 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3272722, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.375640] env[69328]: DEBUG oslo_vmware.api [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Waiting for the task: (returnval){ [ 613.375640] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]529628b8-b94d-b908-9cdc-3b55d7a8646b" [ 613.375640] env[69328]: _type = "Task" [ 613.375640] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.386860] env[69328]: DEBUG oslo_vmware.api [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]529628b8-b94d-b908-9cdc-3b55d7a8646b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.412221] env[69328]: DEBUG nova.network.neutron [-] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 613.533181] env[69328]: DEBUG oslo_concurrency.lockutils [None req-72e7da28-9158-4a96-beb1-a3bb043ae1d6 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquiring lock "230c6278-65af-4f5d-b817-0b695086c29d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 613.533181] env[69328]: DEBUG oslo_concurrency.lockutils [None req-72e7da28-9158-4a96-beb1-a3bb043ae1d6 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lock "230c6278-65af-4f5d-b817-0b695086c29d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 613.533181] env[69328]: DEBUG oslo_concurrency.lockutils [None req-72e7da28-9158-4a96-beb1-a3bb043ae1d6 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquiring lock "230c6278-65af-4f5d-b817-0b695086c29d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 613.533363] env[69328]: DEBUG oslo_concurrency.lockutils [None req-72e7da28-9158-4a96-beb1-a3bb043ae1d6 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lock "230c6278-65af-4f5d-b817-0b695086c29d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 613.533412] env[69328]: DEBUG oslo_concurrency.lockutils [None req-72e7da28-9158-4a96-beb1-a3bb043ae1d6 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lock "230c6278-65af-4f5d-b817-0b695086c29d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 613.536910] env[69328]: INFO nova.compute.manager [None req-72e7da28-9158-4a96-beb1-a3bb043ae1d6 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Terminating instance [ 613.591997] env[69328]: DEBUG oslo_concurrency.lockutils [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Releasing lock "refresh_cache-a798c3f2-ccde-488e-8a14-21f4a04f8e12" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 613.592417] env[69328]: DEBUG nova.compute.manager [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] [instance: 
a798c3f2-ccde-488e-8a14-21f4a04f8e12] Instance network_info: |[{"id": "369bd8af-cb0d-49c0-b41e-69689c57cc0a", "address": "fa:16:3e:d2:78:c0", "network": {"id": "f5e5a30f-d08e-46d6-9b1f-18d5e4410095", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-967086728-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4c353c4bd87647548297e8b8553a48e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d377d75-3add-4a15-8691-74b2eb010924", "external-id": "nsx-vlan-transportzone-71", "segmentation_id": 71, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap369bd8af-cb", "ovs_interfaceid": "369bd8af-cb0d-49c0-b41e-69689c57cc0a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 613.593917] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d2:78:c0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7d377d75-3add-4a15-8691-74b2eb010924', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '369bd8af-cb0d-49c0-b41e-69689c57cc0a', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 613.604735] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Creating folder: Project (4c353c4bd87647548297e8b8553a48e3). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 613.612338] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bfb893e7-d01b-4cab-8daf-2aea7e4a9508 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.631536] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Created folder: Project (4c353c4bd87647548297e8b8553a48e3) in parent group-v653649. [ 613.631722] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Creating folder: Instances. Parent ref: group-v653681. 
{{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 613.632461] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ec95ff91-fe07-4163-9aeb-7d34ec13b1db {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.645688] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Created folder: Instances in parent group-v653681. [ 613.645688] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 613.645915] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 613.646082] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c4be9f12-b8fe-4519-aa0f-c09de266160a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.670246] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 613.670246] env[69328]: value = "task-3272725" [ 613.670246] env[69328]: _type = "Task" [ 613.670246] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.680303] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272725, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.747611] env[69328]: DEBUG oslo_vmware.api [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3272720, 'name': Rename_Task, 'duration_secs': 0.207078} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.748623] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 613.748623] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0afae63e-37ef-494c-a554-b3909f726c53 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.757631] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cec7d7a2-3c93-4a75-8406-5941328d9ff9 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Lock "d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.142s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 613.760468] env[69328]: DEBUG oslo_vmware.api [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 613.760468] env[69328]: value = "task-3272726" [ 613.760468] env[69328]: _type = "Task" [ 613.760468] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.773140] env[69328]: DEBUG oslo_vmware.api [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3272726, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.823814] env[69328]: DEBUG oslo_vmware.api [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5279d47f-1fe1-c219-0b39-286c6050618a, 'name': SearchDatastore_Task, 'duration_secs': 0.025231} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.824352] env[69328]: DEBUG oslo_concurrency.lockutils [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 613.824726] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 613.825615] env[69328]: DEBUG oslo_concurrency.lockutils [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 613.870747] env[69328]: INFO nova.compute.manager [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Took 29.03 seconds to build instance. [ 613.870747] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 6102f8e6-f815-4f5f-921f-990be81fca0d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 613.886466] env[69328]: DEBUG oslo_vmware.api [None req-423c7c80-1271-4098-b5b6-1229ad381bd9 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3272722, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.314245} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.887284] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-423c7c80-1271-4098-b5b6-1229ad381bd9 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 613.887523] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-423c7c80-1271-4098-b5b6-1229ad381bd9 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 613.887733] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-423c7c80-1271-4098-b5b6-1229ad381bd9 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 613.888034] env[69328]: INFO nova.compute.manager [None req-423c7c80-1271-4098-b5b6-1229ad381bd9 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Took 1.23 seconds to destroy the instance on the hypervisor. [ 613.888401] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-423c7c80-1271-4098-b5b6-1229ad381bd9 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 613.888649] env[69328]: DEBUG nova.compute.manager [-] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 613.889196] env[69328]: DEBUG nova.network.neutron [-] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 613.897180] env[69328]: DEBUG oslo_vmware.api [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]529628b8-b94d-b908-9cdc-3b55d7a8646b, 'name': SearchDatastore_Task, 'duration_secs': 0.0135} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.897180] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 613.897986] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2/d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 613.898612] env[69328]: DEBUG oslo_concurrency.lockutils [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 613.898865] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 613.899133] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f04a60c2-2dbd-4063-b10a-4a5bd5f0580d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.903347] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5b085a21-5a8f-476c-ab3b-917eb44668bf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.914393] env[69328]: DEBUG oslo_vmware.api [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Waiting for the task: (returnval){ [ 613.914393] env[69328]: value = "task-3272727" [ 613.914393] env[69328]: _type = "Task" [ 613.914393] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.915045] env[69328]: INFO nova.compute.manager [-] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Took 1.75 seconds to deallocate network for instance. 
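The destroy path for instance 50b84adc-... can be read off the records above: "Powering off the VM", "Unregistering the VM", "Deleting the datastore file", "Deleted contents of the VM from datastore", then "Deallocating network for instance". A condensed, self-contained sketch of that ordering follows; the stub classes and helper names are hypothetical stand-ins for the vCenter task calls and Neutron API the log records, kept only to show the sequence.

```python
from dataclasses import dataclass

@dataclass
class Instance:
    uuid: str
    datastore: str

class FakeVCenter:
    """Stand-in for the vCenter task calls seen in the log."""
    def find_vm(self, uuid):        return f"vm-ref-{uuid}"
    def power_off(self, vm_ref):    print(f"PowerOffVM_Task on {vm_ref}")
    def unregister(self, vm_ref):   print(f"UnregisterVM on {vm_ref}")
    def delete_dir(self, ds, uuid): print(f"DeleteDatastoreFile_Task on [{ds}] {uuid}")

class FakeNetworkAPI:
    def deallocate_for_instance(self, inst):
        print(f"deallocate_for_instance({inst.uuid})")

def destroy_instance(instance, vcenter, network_api):
    """Ordering mirrored from the log: power off, unregister, delete files, free ports."""
    vm_ref = vcenter.find_vm(instance.uuid)
    if vm_ref is not None:
        vcenter.power_off(vm_ref)
        vcenter.unregister(vm_ref)
        vcenter.delete_dir(instance.datastore, instance.uuid)
    # Network cleanup happens last, after the hypervisor-side teardown.
    network_api.deallocate_for_instance(instance)

destroy_instance(Instance("50b84adc-5ff3-4a1e-a09f-5c96daef9b87", "datastore2"),
                 FakeVCenter(), FakeNetworkAPI())
```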
[ 613.936359] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 613.936574] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 613.937925] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-246d8807-e052-44e3-a579-f7d5ff4efb55 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.945304] env[69328]: DEBUG oslo_vmware.api [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': task-3272727, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.951201] env[69328]: DEBUG oslo_vmware.api [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Waiting for the task: (returnval){ [ 613.951201] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52354970-52e7-4109-e045-7af2b974e4ed" [ 613.951201] env[69328]: _type = "Task" [ 613.951201] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.961623] env[69328]: DEBUG oslo_vmware.api [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52354970-52e7-4109-e045-7af2b974e4ed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.031087] env[69328]: DEBUG nova.network.neutron [req-1e5dbb3a-bad6-4570-8d8b-7f4aca583dc8 req-c743623b-20bf-4e60-8f14-cafce5382c18 service nova] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Updated VIF entry in instance network info cache for port f00ec7b2-0d01-4e8c-b30b-50314520c094. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 614.031759] env[69328]: DEBUG nova.network.neutron [req-1e5dbb3a-bad6-4570-8d8b-7f4aca583dc8 req-c743623b-20bf-4e60-8f14-cafce5382c18 service nova] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Updating instance_info_cache with network_info: [{"id": "f00ec7b2-0d01-4e8c-b30b-50314520c094", "address": "fa:16:3e:4c:b2:68", "network": {"id": "a1617aa6-27e2-4648-ad66-bff29c8d3d2a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-897974516-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.212", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2da97117081d44cab074540e0b39d0e5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98f447de-d71e-41ef-bc37-ed97b4a1f58f", "external-id": "nsx-vlan-transportzone-904", "segmentation_id": 904, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf00ec7b2-0d", "ovs_interfaceid": "f00ec7b2-0d01-4e8c-b30b-50314520c094", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 614.043392] env[69328]: DEBUG nova.compute.manager [None req-72e7da28-9158-4a96-beb1-a3bb043ae1d6 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 614.045314] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-72e7da28-9158-4a96-beb1-a3bb043ae1d6 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 614.048358] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-191cd60a-b7a6-4950-931b-a503ace5a568 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.061130] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-72e7da28-9158-4a96-beb1-a3bb043ae1d6 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 614.061505] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-904ddc03-f235-4e27-8a2f-d86ee2d7fd27 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.072182] env[69328]: DEBUG oslo_vmware.api [None req-72e7da28-9158-4a96-beb1-a3bb043ae1d6 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for the task: (returnval){ [ 614.072182] env[69328]: value = "task-3272728" [ 614.072182] env[69328]: _type = "Task" [ 614.072182] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.085736] env[69328]: DEBUG oslo_vmware.api [None req-72e7da28-9158-4a96-beb1-a3bb043ae1d6 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3272728, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.118127] env[69328]: DEBUG oslo_concurrency.lockutils [None req-70ea211f-5811-4681-9f41-3a22cf226773 tempest-DeleteServersAdminTestJSON-1713601757 tempest-DeleteServersAdminTestJSON-1713601757-project-admin] Acquiring lock "caba3b5c-db15-4de6-8d3d-41f6751f1b83" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 614.118388] env[69328]: DEBUG oslo_concurrency.lockutils [None req-70ea211f-5811-4681-9f41-3a22cf226773 tempest-DeleteServersAdminTestJSON-1713601757 tempest-DeleteServersAdminTestJSON-1713601757-project-admin] Lock "caba3b5c-db15-4de6-8d3d-41f6751f1b83" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 614.118665] env[69328]: DEBUG oslo_concurrency.lockutils [None req-70ea211f-5811-4681-9f41-3a22cf226773 tempest-DeleteServersAdminTestJSON-1713601757 tempest-DeleteServersAdminTestJSON-1713601757-project-admin] Acquiring lock "caba3b5c-db15-4de6-8d3d-41f6751f1b83-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 614.119145] env[69328]: DEBUG oslo_concurrency.lockutils [None req-70ea211f-5811-4681-9f41-3a22cf226773 tempest-DeleteServersAdminTestJSON-1713601757 tempest-DeleteServersAdminTestJSON-1713601757-project-admin] Lock "caba3b5c-db15-4de6-8d3d-41f6751f1b83-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 614.119145] env[69328]: DEBUG oslo_concurrency.lockutils [None req-70ea211f-5811-4681-9f41-3a22cf226773 tempest-DeleteServersAdminTestJSON-1713601757 tempest-DeleteServersAdminTestJSON-1713601757-project-admin] Lock "caba3b5c-db15-4de6-8d3d-41f6751f1b83-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 614.121762] env[69328]: INFO nova.compute.manager [None req-70ea211f-5811-4681-9f41-3a22cf226773 tempest-DeleteServersAdminTestJSON-1713601757 tempest-DeleteServersAdminTestJSON-1713601757-project-admin] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Terminating instance [ 614.187042] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272725, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.267616] env[69328]: DEBUG nova.compute.manager [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Starting instance... 
{{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 614.280089] env[69328]: DEBUG oslo_vmware.api [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3272726, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.378924] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b13e2c32-6465-4763-86d8-1fbc13877fbb tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Lock "d97dc6d5-e55f-4b9e-91e6-cfdea82f5236" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.547s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 614.387812] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 46526210-2783-408d-9ecb-773f33ff0c66 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 614.439943] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aec69d4d-dbfb-4864-9814-8fe245f37e57 tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 614.440792] env[69328]: DEBUG oslo_vmware.api [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': task-3272727, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.463778] env[69328]: DEBUG oslo_vmware.api [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52354970-52e7-4109-e045-7af2b974e4ed, 'name': SearchDatastore_Task, 'duration_secs': 0.011748} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.464606] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4656933f-972d-4472-97c7-f6f37cc94ac8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.472691] env[69328]: DEBUG oslo_vmware.api [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Waiting for the task: (returnval){ [ 614.472691] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e10034-6a2e-465e-697e-01712e293a41" [ 614.472691] env[69328]: _type = "Task" [ 614.472691] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.485192] env[69328]: DEBUG oslo_vmware.api [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e10034-6a2e-465e-697e-01712e293a41, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.535083] env[69328]: DEBUG oslo_concurrency.lockutils [req-1e5dbb3a-bad6-4570-8d8b-7f4aca583dc8 req-c743623b-20bf-4e60-8f14-cafce5382c18 service nova] Releasing lock "refresh_cache-88f9f0c2-0c55-45bf-a494-8f1ee4922443" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 614.535083] env[69328]: DEBUG nova.compute.manager [req-1e5dbb3a-bad6-4570-8d8b-7f4aca583dc8 req-c743623b-20bf-4e60-8f14-cafce5382c18 service nova] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Received event network-vif-plugged-09c4fb65-f87f-4fdc-9a85-cf73224a3ca3 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 614.535495] env[69328]: DEBUG oslo_concurrency.lockutils [req-1e5dbb3a-bad6-4570-8d8b-7f4aca583dc8 req-c743623b-20bf-4e60-8f14-cafce5382c18 service nova] Acquiring lock "d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 614.535574] env[69328]: DEBUG oslo_concurrency.lockutils [req-1e5dbb3a-bad6-4570-8d8b-7f4aca583dc8 req-c743623b-20bf-4e60-8f14-cafce5382c18 service nova] Lock "d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 614.535803] env[69328]: DEBUG oslo_concurrency.lockutils [req-1e5dbb3a-bad6-4570-8d8b-7f4aca583dc8 req-c743623b-20bf-4e60-8f14-cafce5382c18 service nova] Lock "d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 614.536246] env[69328]: DEBUG nova.compute.manager [req-1e5dbb3a-bad6-4570-8d8b-7f4aca583dc8 req-c743623b-20bf-4e60-8f14-cafce5382c18 service nova] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] No waiting events found dispatching network-vif-plugged-09c4fb65-f87f-4fdc-9a85-cf73224a3ca3 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 614.536246] env[69328]: WARNING nova.compute.manager [req-1e5dbb3a-bad6-4570-8d8b-7f4aca583dc8 req-c743623b-20bf-4e60-8f14-cafce5382c18 service nova] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Received unexpected event network-vif-plugged-09c4fb65-f87f-4fdc-9a85-cf73224a3ca3 for instance with vm_state building and task_state spawning. 
[ 614.536373] env[69328]: DEBUG nova.compute.manager [req-1e5dbb3a-bad6-4570-8d8b-7f4aca583dc8 req-c743623b-20bf-4e60-8f14-cafce5382c18 service nova] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Received event network-changed-09c4fb65-f87f-4fdc-9a85-cf73224a3ca3 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 614.536528] env[69328]: DEBUG nova.compute.manager [req-1e5dbb3a-bad6-4570-8d8b-7f4aca583dc8 req-c743623b-20bf-4e60-8f14-cafce5382c18 service nova] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Refreshing instance network info cache due to event network-changed-09c4fb65-f87f-4fdc-9a85-cf73224a3ca3. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 614.536850] env[69328]: DEBUG oslo_concurrency.lockutils [req-1e5dbb3a-bad6-4570-8d8b-7f4aca583dc8 req-c743623b-20bf-4e60-8f14-cafce5382c18 service nova] Acquiring lock "refresh_cache-d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 614.536850] env[69328]: DEBUG oslo_concurrency.lockutils [req-1e5dbb3a-bad6-4570-8d8b-7f4aca583dc8 req-c743623b-20bf-4e60-8f14-cafce5382c18 service nova] Acquired lock "refresh_cache-d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 614.537051] env[69328]: DEBUG nova.network.neutron [req-1e5dbb3a-bad6-4570-8d8b-7f4aca583dc8 req-c743623b-20bf-4e60-8f14-cafce5382c18 service nova] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Refreshing network info cache for port 09c4fb65-f87f-4fdc-9a85-cf73224a3ca3 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 614.586991] env[69328]: DEBUG oslo_vmware.api [None req-72e7da28-9158-4a96-beb1-a3bb043ae1d6 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3272728, 'name': PowerOffVM_Task, 'duration_secs': 0.229655} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.586991] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-72e7da28-9158-4a96-beb1-a3bb043ae1d6 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 614.586991] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-72e7da28-9158-4a96-beb1-a3bb043ae1d6 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 614.586991] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c2844333-730a-45dc-bd97-bc5a77ee7d89 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.629051] env[69328]: DEBUG nova.compute.manager [None req-70ea211f-5811-4681-9f41-3a22cf226773 tempest-DeleteServersAdminTestJSON-1713601757 tempest-DeleteServersAdminTestJSON-1713601757-project-admin] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 614.629051] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-70ea211f-5811-4681-9f41-3a22cf226773 tempest-DeleteServersAdminTestJSON-1713601757 tempest-DeleteServersAdminTestJSON-1713601757-project-admin] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 614.629051] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1af202f-0d0c-40aa-b61f-05950137f993 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.641801] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-70ea211f-5811-4681-9f41-3a22cf226773 tempest-DeleteServersAdminTestJSON-1713601757 tempest-DeleteServersAdminTestJSON-1713601757-project-admin] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 614.642443] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f9123f5c-6ea9-4772-81d7-c3328f40692c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.666263] env[69328]: DEBUG oslo_vmware.api [None req-70ea211f-5811-4681-9f41-3a22cf226773 tempest-DeleteServersAdminTestJSON-1713601757 tempest-DeleteServersAdminTestJSON-1713601757-project-admin] Waiting for the task: (returnval){ [ 614.666263] env[69328]: value = "task-3272730" [ 614.666263] env[69328]: _type = "Task" [ 614.666263] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.675751] env[69328]: DEBUG oslo_vmware.api [None req-70ea211f-5811-4681-9f41-3a22cf226773 tempest-DeleteServersAdminTestJSON-1713601757 tempest-DeleteServersAdminTestJSON-1713601757-project-admin] Task: {'id': task-3272730, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.681912] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-72e7da28-9158-4a96-beb1-a3bb043ae1d6 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 614.684717] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-72e7da28-9158-4a96-beb1-a3bb043ae1d6 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 614.684921] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-72e7da28-9158-4a96-beb1-a3bb043ae1d6 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Deleting the datastore file [datastore2] 230c6278-65af-4f5d-b817-0b695086c29d {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 614.689162] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-10fa4424-8ba6-4950-9253-c023eaab1900 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.691191] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272725, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.697153] env[69328]: DEBUG oslo_vmware.api [None req-72e7da28-9158-4a96-beb1-a3bb043ae1d6 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for the task: (returnval){ [ 614.697153] env[69328]: value = "task-3272731" [ 614.697153] env[69328]: _type = "Task" [ 614.697153] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.707245] env[69328]: DEBUG oslo_vmware.api [None req-72e7da28-9158-4a96-beb1-a3bb043ae1d6 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3272731, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.773741] env[69328]: DEBUG oslo_vmware.api [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3272726, 'name': PowerOnVM_Task, 'duration_secs': 0.889301} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.775445] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 614.775445] env[69328]: INFO nova.compute.manager [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Took 10.09 seconds to spawn the instance on the hypervisor. [ 614.775445] env[69328]: DEBUG nova.compute.manager [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 614.775445] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7174175-1bdf-43e2-b967-b33fe360571c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.811870] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 614.887921] env[69328]: DEBUG nova.compute.manager [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 614.897790] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance e92953f4-b634-4ef9-a5ad-63a886cfa007 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 614.927801] env[69328]: DEBUG oslo_vmware.api [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': task-3272727, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.568802} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.928122] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2/d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 614.928808] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 614.928808] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3b1b3821-0092-4306-a625-85ddd61c283d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.938040] env[69328]: DEBUG oslo_vmware.api [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Waiting for the task: (returnval){ [ 614.938040] env[69328]: value = "task-3272732" [ 614.938040] env[69328]: _type = "Task" [ 614.938040] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.948530] env[69328]: DEBUG oslo_vmware.api [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': task-3272732, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.983981] env[69328]: DEBUG oslo_vmware.api [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e10034-6a2e-465e-697e-01712e293a41, 'name': SearchDatastore_Task, 'duration_secs': 0.011653} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.984305] env[69328]: DEBUG oslo_concurrency.lockutils [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 614.984593] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 26feb2d1-ff64-4a13-af83-b6d5fe4348e1/26feb2d1-ff64-4a13-af83-b6d5fe4348e1.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 614.984854] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-120a6456-badf-4522-a7e5-ec1ce9086de1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.994420] env[69328]: DEBUG oslo_vmware.api [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Waiting for the task: (returnval){ [ 614.994420] env[69328]: value = "task-3272733" [ 614.994420] env[69328]: _type = "Task" [ 614.994420] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.009760] env[69328]: DEBUG oslo_vmware.api [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': task-3272733, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.198389] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272725, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.198921] env[69328]: DEBUG oslo_vmware.api [None req-70ea211f-5811-4681-9f41-3a22cf226773 tempest-DeleteServersAdminTestJSON-1713601757 tempest-DeleteServersAdminTestJSON-1713601757-project-admin] Task: {'id': task-3272730, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.212196] env[69328]: DEBUG oslo_vmware.api [None req-72e7da28-9158-4a96-beb1-a3bb043ae1d6 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3272731, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.266846} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.212562] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-72e7da28-9158-4a96-beb1-a3bb043ae1d6 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 615.212974] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-72e7da28-9158-4a96-beb1-a3bb043ae1d6 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 615.213092] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-72e7da28-9158-4a96-beb1-a3bb043ae1d6 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 615.213270] env[69328]: INFO nova.compute.manager [None req-72e7da28-9158-4a96-beb1-a3bb043ae1d6 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Took 1.17 seconds to destroy the instance on the hypervisor. [ 615.213540] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-72e7da28-9158-4a96-beb1-a3bb043ae1d6 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 615.213826] env[69328]: DEBUG nova.compute.manager [-] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 615.214509] env[69328]: DEBUG nova.network.neutron [-] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 615.306898] env[69328]: INFO nova.compute.manager [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Took 28.30 seconds to build instance. [ 615.402647] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 9753734d-90f0-4661-8029-ec312e88eb60 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 615.427415] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 615.453250] env[69328]: DEBUG oslo_vmware.api [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': task-3272732, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.323576} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.453250] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 615.453439] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2298b988-0cba-496e-9e93-1f5de5e75cba {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.485967] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Reconfiguring VM instance instance-0000000a to attach disk [datastore1] d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2/d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 615.485967] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7fad78bd-959f-44a9-b5aa-7021b915baca {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.503530] env[69328]: DEBUG nova.network.neutron [-] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.516701] env[69328]: DEBUG oslo_vmware.api [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': task-3272733, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.518616] env[69328]: DEBUG oslo_vmware.api [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Waiting for the task: (returnval){ [ 615.518616] env[69328]: value = "task-3272734" [ 615.518616] env[69328]: _type = "Task" [ 615.518616] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.525049] env[69328]: DEBUG nova.network.neutron [req-1e5dbb3a-bad6-4570-8d8b-7f4aca583dc8 req-c743623b-20bf-4e60-8f14-cafce5382c18 service nova] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Updated VIF entry in instance network info cache for port 09c4fb65-f87f-4fdc-9a85-cf73224a3ca3. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 615.525467] env[69328]: DEBUG nova.network.neutron [req-1e5dbb3a-bad6-4570-8d8b-7f4aca583dc8 req-c743623b-20bf-4e60-8f14-cafce5382c18 service nova] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Updating instance_info_cache with network_info: [{"id": "09c4fb65-f87f-4fdc-9a85-cf73224a3ca3", "address": "fa:16:3e:e6:08:a2", "network": {"id": "b7b15f77-0584-4f19-a05e-67df3efe1b9d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-778653716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8502178b3d334c338b63dfde3eae8f08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d275d7c6-2a7b-4ee8-b6f4-fabf1ba1905f", "external-id": "nsx-vlan-transportzone-513", "segmentation_id": 513, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09c4fb65-f8", "ovs_interfaceid": "09c4fb65-f87f-4fdc-9a85-cf73224a3ca3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.532059] env[69328]: DEBUG oslo_vmware.api [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': task-3272734, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.675964] env[69328]: DEBUG oslo_vmware.api [None req-70ea211f-5811-4681-9f41-3a22cf226773 tempest-DeleteServersAdminTestJSON-1713601757 tempest-DeleteServersAdminTestJSON-1713601757-project-admin] Task: {'id': task-3272730, 'name': PowerOffVM_Task, 'duration_secs': 0.741769} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.676541] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-70ea211f-5811-4681-9f41-3a22cf226773 tempest-DeleteServersAdminTestJSON-1713601757 tempest-DeleteServersAdminTestJSON-1713601757-project-admin] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 615.676716] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-70ea211f-5811-4681-9f41-3a22cf226773 tempest-DeleteServersAdminTestJSON-1713601757 tempest-DeleteServersAdminTestJSON-1713601757-project-admin] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 615.681082] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7f8db800-505a-40e0-9658-8846e409b5bb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.688947] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272725, 'name': CreateVM_Task, 'duration_secs': 1.840678} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.689327] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 615.690073] env[69328]: DEBUG oslo_concurrency.lockutils [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.690275] env[69328]: DEBUG oslo_concurrency.lockutils [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 615.690643] env[69328]: DEBUG oslo_concurrency.lockutils [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 615.690923] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a6c2b75-c30b-4a86-9d33-54289755e477 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.697702] env[69328]: DEBUG oslo_vmware.api [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Waiting for the task: (returnval){ [ 615.697702] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52720e5f-2f43-8946-6fff-bff20327bac8" [ 615.697702] env[69328]: _type = "Task" [ 615.697702] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.707548] env[69328]: DEBUG oslo_vmware.api [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52720e5f-2f43-8946-6fff-bff20327bac8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.761521] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-70ea211f-5811-4681-9f41-3a22cf226773 tempest-DeleteServersAdminTestJSON-1713601757 tempest-DeleteServersAdminTestJSON-1713601757-project-admin] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 615.761803] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-70ea211f-5811-4681-9f41-3a22cf226773 tempest-DeleteServersAdminTestJSON-1713601757 tempest-DeleteServersAdminTestJSON-1713601757-project-admin] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 615.762463] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-70ea211f-5811-4681-9f41-3a22cf226773 tempest-DeleteServersAdminTestJSON-1713601757 tempest-DeleteServersAdminTestJSON-1713601757-project-admin] Deleting the datastore file [datastore1] caba3b5c-db15-4de6-8d3d-41f6751f1b83 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 615.762836] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-561d263c-ff81-4776-9b71-a2fd4148cf70 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.771263] env[69328]: DEBUG oslo_vmware.api [None req-70ea211f-5811-4681-9f41-3a22cf226773 tempest-DeleteServersAdminTestJSON-1713601757 tempest-DeleteServersAdminTestJSON-1713601757-project-admin] Waiting for the task: (returnval){ [ 615.771263] env[69328]: value = "task-3272736" [ 615.771263] env[69328]: _type = "Task" [ 615.771263] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.786974] env[69328]: DEBUG oslo_vmware.api [None req-70ea211f-5811-4681-9f41-3a22cf226773 tempest-DeleteServersAdminTestJSON-1713601757 tempest-DeleteServersAdminTestJSON-1713601757-project-admin] Task: {'id': task-3272736, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.809517] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1bdcc52-a0be-4a41-8b5a-61b5d58b2796 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "ed10d511-dbed-4884-8ac6-f737173f62c5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.813s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 615.902912] env[69328]: DEBUG nova.compute.manager [req-839b04b9-a3c8-47c4-ba46-e3fc4d0a0c55 req-db51e847-046d-4a99-9460-162f00e04250 service nova] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Received event network-vif-deleted-6cff9499-a9e2-4bf0-8d52-582ddcfd6392 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 615.912701] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 15a8de08-4d20-4329-9867-53e5dff82878 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 615.912931] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Total usable vcpus: 48, total allocated vcpus: 12 {{(pid=69328) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 615.913113] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2816MB phys_disk=200GB used_disk=12GB total_vcpus=48 used_vcpus=12 pci_stats=[] {{(pid=69328) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 616.010568] env[69328]: INFO nova.compute.manager [-] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Took 2.12 seconds to deallocate network for instance. [ 616.023929] env[69328]: DEBUG oslo_vmware.api [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': task-3272733, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.648669} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.027841] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 26feb2d1-ff64-4a13-af83-b6d5fe4348e1/26feb2d1-ff64-4a13-af83-b6d5fe4348e1.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 616.028113] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 616.028380] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-59f404fa-0048-45d8-ae7e-3d70765bd4ce {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.033669] env[69328]: DEBUG oslo_concurrency.lockutils [req-1e5dbb3a-bad6-4570-8d8b-7f4aca583dc8 req-c743623b-20bf-4e60-8f14-cafce5382c18 service nova] Releasing lock "refresh_cache-d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 616.033916] env[69328]: DEBUG nova.compute.manager [req-1e5dbb3a-bad6-4570-8d8b-7f4aca583dc8 req-c743623b-20bf-4e60-8f14-cafce5382c18 service nova] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Received event network-vif-plugged-369bd8af-cb0d-49c0-b41e-69689c57cc0a {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 616.034416] env[69328]: DEBUG oslo_concurrency.lockutils [req-1e5dbb3a-bad6-4570-8d8b-7f4aca583dc8 req-c743623b-20bf-4e60-8f14-cafce5382c18 service nova] Acquiring lock "a798c3f2-ccde-488e-8a14-21f4a04f8e12-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 616.034664] env[69328]: DEBUG oslo_concurrency.lockutils [req-1e5dbb3a-bad6-4570-8d8b-7f4aca583dc8 req-c743623b-20bf-4e60-8f14-cafce5382c18 service nova] Lock "a798c3f2-ccde-488e-8a14-21f4a04f8e12-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 616.034826] env[69328]: DEBUG oslo_concurrency.lockutils [req-1e5dbb3a-bad6-4570-8d8b-7f4aca583dc8 req-c743623b-20bf-4e60-8f14-cafce5382c18 service nova] Lock "a798c3f2-ccde-488e-8a14-21f4a04f8e12-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 616.035018] env[69328]: DEBUG nova.compute.manager [req-1e5dbb3a-bad6-4570-8d8b-7f4aca583dc8 req-c743623b-20bf-4e60-8f14-cafce5382c18 service nova] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] No waiting events found dispatching network-vif-plugged-369bd8af-cb0d-49c0-b41e-69689c57cc0a {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 616.035182] 
env[69328]: WARNING nova.compute.manager [req-1e5dbb3a-bad6-4570-8d8b-7f4aca583dc8 req-c743623b-20bf-4e60-8f14-cafce5382c18 service nova] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Received unexpected event network-vif-plugged-369bd8af-cb0d-49c0-b41e-69689c57cc0a for instance with vm_state building and task_state spawning. [ 616.035346] env[69328]: DEBUG nova.compute.manager [req-1e5dbb3a-bad6-4570-8d8b-7f4aca583dc8 req-c743623b-20bf-4e60-8f14-cafce5382c18 service nova] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Received event network-changed-369bd8af-cb0d-49c0-b41e-69689c57cc0a {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 616.035492] env[69328]: DEBUG nova.compute.manager [req-1e5dbb3a-bad6-4570-8d8b-7f4aca583dc8 req-c743623b-20bf-4e60-8f14-cafce5382c18 service nova] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Refreshing instance network info cache due to event network-changed-369bd8af-cb0d-49c0-b41e-69689c57cc0a. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 616.035682] env[69328]: DEBUG oslo_concurrency.lockutils [req-1e5dbb3a-bad6-4570-8d8b-7f4aca583dc8 req-c743623b-20bf-4e60-8f14-cafce5382c18 service nova] Acquiring lock "refresh_cache-a798c3f2-ccde-488e-8a14-21f4a04f8e12" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 616.036684] env[69328]: DEBUG oslo_concurrency.lockutils [req-1e5dbb3a-bad6-4570-8d8b-7f4aca583dc8 req-c743623b-20bf-4e60-8f14-cafce5382c18 service nova] Acquired lock "refresh_cache-a798c3f2-ccde-488e-8a14-21f4a04f8e12" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 616.037135] env[69328]: DEBUG nova.network.neutron [req-1e5dbb3a-bad6-4570-8d8b-7f4aca583dc8 req-c743623b-20bf-4e60-8f14-cafce5382c18 service nova] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Refreshing network info cache for port 369bd8af-cb0d-49c0-b41e-69689c57cc0a {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 616.049207] env[69328]: DEBUG oslo_vmware.api [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': task-3272734, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.051979] env[69328]: DEBUG oslo_vmware.api [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Waiting for the task: (returnval){ [ 616.051979] env[69328]: value = "task-3272737" [ 616.051979] env[69328]: _type = "Task" [ 616.051979] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.063567] env[69328]: DEBUG oslo_vmware.api [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': task-3272737, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.216449] env[69328]: DEBUG oslo_vmware.api [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52720e5f-2f43-8946-6fff-bff20327bac8, 'name': SearchDatastore_Task, 'duration_secs': 0.03792} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.219478] env[69328]: DEBUG oslo_concurrency.lockutils [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 616.219602] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 616.219860] env[69328]: DEBUG oslo_concurrency.lockutils [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 616.220043] env[69328]: DEBUG oslo_concurrency.lockutils [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 616.222030] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 616.222030] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aee0e087-c593-46d9-b0f4-22c213133619 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.237492] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 616.237683] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Folder [datastore1] 
devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 616.238457] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87524806-d0b9-4379-a476-5b32365dc567 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.245137] env[69328]: DEBUG oslo_vmware.api [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Waiting for the task: (returnval){ [ 616.245137] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52482c81-823d-2143-f40d-4386fe767657" [ 616.245137] env[69328]: _type = "Task" [ 616.245137] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.255987] env[69328]: DEBUG oslo_vmware.api [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52482c81-823d-2143-f40d-4386fe767657, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.290288] env[69328]: DEBUG oslo_vmware.api [None req-70ea211f-5811-4681-9f41-3a22cf226773 tempest-DeleteServersAdminTestJSON-1713601757 tempest-DeleteServersAdminTestJSON-1713601757-project-admin] Task: {'id': task-3272736, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.330835} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.290288] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-70ea211f-5811-4681-9f41-3a22cf226773 tempest-DeleteServersAdminTestJSON-1713601757 tempest-DeleteServersAdminTestJSON-1713601757-project-admin] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 616.290288] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-70ea211f-5811-4681-9f41-3a22cf226773 tempest-DeleteServersAdminTestJSON-1713601757 tempest-DeleteServersAdminTestJSON-1713601757-project-admin] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 616.290644] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-70ea211f-5811-4681-9f41-3a22cf226773 tempest-DeleteServersAdminTestJSON-1713601757 tempest-DeleteServersAdminTestJSON-1713601757-project-admin] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 616.290644] env[69328]: INFO nova.compute.manager [None req-70ea211f-5811-4681-9f41-3a22cf226773 tempest-DeleteServersAdminTestJSON-1713601757 tempest-DeleteServersAdminTestJSON-1713601757-project-admin] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Took 1.66 seconds to destroy the instance on the hypervisor. [ 616.291122] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-70ea211f-5811-4681-9f41-3a22cf226773 tempest-DeleteServersAdminTestJSON-1713601757 tempest-DeleteServersAdminTestJSON-1713601757-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 616.291122] env[69328]: DEBUG nova.compute.manager [-] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 616.291122] env[69328]: DEBUG nova.network.neutron [-] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 616.313412] env[69328]: DEBUG nova.compute.manager [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 616.371791] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59b56247-bc74-433b-996b-f96815972d46 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.385134] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42fad5de-1af8-48fe-8bcf-441a50b3a42a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.421911] env[69328]: DEBUG nova.network.neutron [-] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 616.427020] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bf4e093-16ac-4ef5-b480-3ee59278e756 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.434395] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c12bb0c-b6f1-4ae0-bef3-2b10e35e75ee {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.453759] env[69328]: DEBUG nova.compute.provider_tree [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 616.527824] env[69328]: DEBUG oslo_concurrency.lockutils [None req-423c7c80-1271-4098-b5b6-1229ad381bd9 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 616.537892] env[69328]: DEBUG oslo_vmware.api [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': task-3272734, 'name': ReconfigVM_Task, 'duration_secs': 0.667817} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.538203] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Reconfigured VM instance instance-0000000a to attach disk [datastore1] d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2/d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 616.538825] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0ce3956b-a8f5-4a8e-9fa8-fd53ab59ebb0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.548216] env[69328]: DEBUG oslo_vmware.api [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Waiting for the task: (returnval){ [ 616.548216] env[69328]: value = "task-3272738" [ 616.548216] env[69328]: _type = "Task" [ 616.548216] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.568137] env[69328]: DEBUG oslo_vmware.api [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': task-3272738, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.575458] env[69328]: DEBUG oslo_vmware.api [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': task-3272737, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077657} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.575725] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 616.576852] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77e883ce-97e7-43f2-8595-d63bb3e15aa1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.605682] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Reconfiguring VM instance instance-0000000c to attach disk [datastore1] 26feb2d1-ff64-4a13-af83-b6d5fe4348e1/26feb2d1-ff64-4a13-af83-b6d5fe4348e1.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 616.607520] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6af823e0-0375-4e9d-bddb-b390f4e3ef3c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.629164] env[69328]: DEBUG nova.network.neutron [-] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 616.637774] env[69328]: DEBUG oslo_vmware.api [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Waiting for the task: (returnval){ [ 616.637774] env[69328]: value = "task-3272739" [ 616.637774] env[69328]: _type = "Task" [ 616.637774] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.648966] env[69328]: DEBUG oslo_vmware.api [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': task-3272739, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.762158] env[69328]: DEBUG oslo_vmware.api [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52482c81-823d-2143-f40d-4386fe767657, 'name': SearchDatastore_Task, 'duration_secs': 0.039649} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.762988] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7303666-bf7c-4e10-a5e5-263380306c95 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.772472] env[69328]: DEBUG oslo_vmware.api [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Waiting for the task: (returnval){ [ 616.772472] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5253698b-957a-11ae-1c27-04072b57b69a" [ 616.772472] env[69328]: _type = "Task" [ 616.772472] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.783501] env[69328]: DEBUG oslo_vmware.api [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5253698b-957a-11ae-1c27-04072b57b69a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.841770] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 616.924081] env[69328]: INFO nova.compute.manager [-] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Took 1.71 seconds to deallocate network for instance. [ 616.956982] env[69328]: DEBUG nova.network.neutron [req-1e5dbb3a-bad6-4570-8d8b-7f4aca583dc8 req-c743623b-20bf-4e60-8f14-cafce5382c18 service nova] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Updated VIF entry in instance network info cache for port 369bd8af-cb0d-49c0-b41e-69689c57cc0a. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 616.957513] env[69328]: DEBUG nova.network.neutron [req-1e5dbb3a-bad6-4570-8d8b-7f4aca583dc8 req-c743623b-20bf-4e60-8f14-cafce5382c18 service nova] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Updating instance_info_cache with network_info: [{"id": "369bd8af-cb0d-49c0-b41e-69689c57cc0a", "address": "fa:16:3e:d2:78:c0", "network": {"id": "f5e5a30f-d08e-46d6-9b1f-18d5e4410095", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-967086728-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4c353c4bd87647548297e8b8553a48e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d377d75-3add-4a15-8691-74b2eb010924", "external-id": "nsx-vlan-transportzone-71", "segmentation_id": 71, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap369bd8af-cb", "ovs_interfaceid": "369bd8af-cb0d-49c0-b41e-69689c57cc0a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 616.961273] env[69328]: DEBUG nova.scheduler.client.report [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 617.059688] env[69328]: DEBUG oslo_vmware.api [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': task-3272738, 'name': Rename_Task, 'duration_secs': 0.48311} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.059935] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 617.060217] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4ed67110-6fdd-4307-9e3b-27fc76d1d5d3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.068419] env[69328]: DEBUG oslo_vmware.api [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Waiting for the task: (returnval){ [ 617.068419] env[69328]: value = "task-3272740" [ 617.068419] env[69328]: _type = "Task" [ 617.068419] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.079551] env[69328]: DEBUG oslo_vmware.api [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': task-3272740, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.132539] env[69328]: INFO nova.compute.manager [-] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Took 0.84 seconds to deallocate network for instance. [ 617.151358] env[69328]: DEBUG oslo_vmware.api [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': task-3272739, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.230398] env[69328]: DEBUG nova.compute.manager [None req-5cce038a-cabe-4692-8cb7-6d8aae040cd1 tempest-ServerExternalEventsTest-1092344189 tempest-ServerExternalEventsTest-1092344189-project] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Received event network-changed {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 617.230745] env[69328]: DEBUG nova.compute.manager [None req-5cce038a-cabe-4692-8cb7-6d8aae040cd1 tempest-ServerExternalEventsTest-1092344189 tempest-ServerExternalEventsTest-1092344189-project] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Refreshing instance network info cache due to event network-changed. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 617.230945] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5cce038a-cabe-4692-8cb7-6d8aae040cd1 tempest-ServerExternalEventsTest-1092344189 tempest-ServerExternalEventsTest-1092344189-project] Acquiring lock "refresh_cache-d97dc6d5-e55f-4b9e-91e6-cfdea82f5236" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.231190] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5cce038a-cabe-4692-8cb7-6d8aae040cd1 tempest-ServerExternalEventsTest-1092344189 tempest-ServerExternalEventsTest-1092344189-project] Acquired lock "refresh_cache-d97dc6d5-e55f-4b9e-91e6-cfdea82f5236" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 617.231413] env[69328]: DEBUG nova.network.neutron [None req-5cce038a-cabe-4692-8cb7-6d8aae040cd1 tempest-ServerExternalEventsTest-1092344189 tempest-ServerExternalEventsTest-1092344189-project] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 617.284018] env[69328]: DEBUG oslo_vmware.api [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5253698b-957a-11ae-1c27-04072b57b69a, 'name': SearchDatastore_Task, 'duration_secs': 0.040129} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.284302] env[69328]: DEBUG oslo_concurrency.lockutils [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 617.284574] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] a798c3f2-ccde-488e-8a14-21f4a04f8e12/a798c3f2-ccde-488e-8a14-21f4a04f8e12.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 617.284843] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-75750700-f30a-4afa-b335-823daa912a3f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.292975] env[69328]: DEBUG oslo_vmware.api [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Waiting for the task: (returnval){ [ 617.292975] env[69328]: value = "task-3272741" [ 617.292975] env[69328]: _type = "Task" [ 617.292975] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.303071] env[69328]: DEBUG oslo_vmware.api [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Task: {'id': task-3272741, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.369921] env[69328]: DEBUG nova.compute.manager [req-76ad612b-636d-43fd-9498-b1dd89646196 req-01b853c4-37b8-4389-ac6c-714d74c0cc35 service nova] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Received event network-vif-deleted-f2be515c-61cb-4257-b9e3-858bf3798d6d {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 617.435624] env[69328]: DEBUG oslo_concurrency.lockutils [None req-72e7da28-9158-4a96-beb1-a3bb043ae1d6 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 617.464365] env[69328]: DEBUG oslo_concurrency.lockutils [req-1e5dbb3a-bad6-4570-8d8b-7f4aca583dc8 req-c743623b-20bf-4e60-8f14-cafce5382c18 service nova] Releasing lock "refresh_cache-a798c3f2-ccde-488e-8a14-21f4a04f8e12" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 617.467672] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69328) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 617.467672] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 7.175s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 617.467672] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.491s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 617.467861] env[69328]: INFO nova.compute.claims [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 617.582515] env[69328]: DEBUG oslo_vmware.api [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': task-3272740, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.619268] env[69328]: DEBUG nova.compute.manager [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Stashing vm_state: active {{(pid=69328) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 617.641255] env[69328]: DEBUG oslo_concurrency.lockutils [None req-70ea211f-5811-4681-9f41-3a22cf226773 tempest-DeleteServersAdminTestJSON-1713601757 tempest-DeleteServersAdminTestJSON-1713601757-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 617.660715] env[69328]: DEBUG oslo_vmware.api [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': task-3272739, 'name': ReconfigVM_Task, 'duration_secs': 0.793642} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.662589] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Reconfigured VM instance instance-0000000c to attach disk [datastore1] 26feb2d1-ff64-4a13-af83-b6d5fe4348e1/26feb2d1-ff64-4a13-af83-b6d5fe4348e1.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 617.662727] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-80cc5549-3a01-4ce6-a3f2-dbcca346b75d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.674506] env[69328]: DEBUG oslo_vmware.api [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Waiting for the task: (returnval){ [ 617.674506] env[69328]: value = "task-3272742" [ 617.674506] env[69328]: _type = "Task" [ 617.674506] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.689593] env[69328]: DEBUG oslo_vmware.api [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': task-3272742, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.809677] env[69328]: DEBUG oslo_vmware.api [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Task: {'id': task-3272741, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.083727] env[69328]: DEBUG oslo_vmware.api [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': task-3272740, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.109273] env[69328]: DEBUG nova.network.neutron [None req-5cce038a-cabe-4692-8cb7-6d8aae040cd1 tempest-ServerExternalEventsTest-1092344189 tempest-ServerExternalEventsTest-1092344189-project] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Updating instance_info_cache with network_info: [{"id": "b801ae0c-2061-4103-8530-3d58f8785333", "address": "fa:16:3e:09:6a:91", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.186", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb801ae0c-20", "ovs_interfaceid": "b801ae0c-2061-4103-8530-3d58f8785333", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 618.156557] env[69328]: DEBUG oslo_concurrency.lockutils [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 618.192556] env[69328]: DEBUG oslo_vmware.api [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': task-3272742, 'name': Rename_Task, 'duration_secs': 0.215237} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.192556] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 618.192780] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-34b3ef90-9844-40cf-a54c-bd64509e0983 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.206110] env[69328]: DEBUG oslo_vmware.api [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Waiting for the task: (returnval){ [ 618.206110] env[69328]: value = "task-3272743" [ 618.206110] env[69328]: _type = "Task" [ 618.206110] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.212740] env[69328]: DEBUG oslo_vmware.api [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': task-3272743, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.305234] env[69328]: DEBUG oslo_vmware.api [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Task: {'id': task-3272741, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.576057} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.305234] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] a798c3f2-ccde-488e-8a14-21f4a04f8e12/a798c3f2-ccde-488e-8a14-21f4a04f8e12.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 618.305234] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 618.305388] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-646da0b9-df1d-4220-995f-87dfaad5f228 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.312898] env[69328]: DEBUG oslo_vmware.api [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Waiting for the task: (returnval){ [ 618.312898] env[69328]: value = "task-3272744" [ 618.312898] env[69328]: _type = "Task" [ 618.312898] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.322344] env[69328]: DEBUG oslo_vmware.api [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Task: {'id': task-3272744, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.582071] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Acquiring lock "5b0e8bef-dcfc-4c5e-89d2-aa1748050d29" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 618.582319] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Lock "5b0e8bef-dcfc-4c5e-89d2-aa1748050d29" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 618.588055] env[69328]: DEBUG oslo_vmware.api [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': task-3272740, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.613785] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5cce038a-cabe-4692-8cb7-6d8aae040cd1 tempest-ServerExternalEventsTest-1092344189 tempest-ServerExternalEventsTest-1092344189-project] Releasing lock "refresh_cache-d97dc6d5-e55f-4b9e-91e6-cfdea82f5236" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 618.663286] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d6a49c30-7556-4612-8fbd-a35acb80e9bd tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "ed10d511-dbed-4884-8ac6-f737173f62c5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 618.664407] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d6a49c30-7556-4612-8fbd-a35acb80e9bd tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "ed10d511-dbed-4884-8ac6-f737173f62c5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 618.664407] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d6a49c30-7556-4612-8fbd-a35acb80e9bd tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "ed10d511-dbed-4884-8ac6-f737173f62c5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 618.664407] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d6a49c30-7556-4612-8fbd-a35acb80e9bd tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "ed10d511-dbed-4884-8ac6-f737173f62c5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 618.664407] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d6a49c30-7556-4612-8fbd-a35acb80e9bd tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "ed10d511-dbed-4884-8ac6-f737173f62c5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 618.667835] env[69328]: INFO nova.compute.manager [None req-d6a49c30-7556-4612-8fbd-a35acb80e9bd tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Terminating instance [ 618.715611] env[69328]: DEBUG oslo_vmware.api [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': task-3272743, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.818983] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7163410-29e0-4a58-b239-8da4a78e0b04 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.828600] env[69328]: DEBUG oslo_vmware.api [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Task: {'id': task-3272744, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069192} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.829573] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b95c5502-4913-40c0-875a-a20e21ae1ae5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.833118] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 618.833853] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8a94afe-92e7-4866-88bf-fb33e03b7b4b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.857181] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Reconfiguring VM instance instance-0000000b to attach disk [datastore1] a798c3f2-ccde-488e-8a14-21f4a04f8e12/a798c3f2-ccde-488e-8a14-21f4a04f8e12.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 618.883775] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-168df2a5-caaa-446f-97ff-97701262267d {{(pid=69328) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.900074] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb9de69f-2162-4463-8346-12f5c8186973 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.908029] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdb7fc62-f822-4e36-b0d5-73b0b28921f0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.913174] env[69328]: DEBUG oslo_vmware.api [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Waiting for the task: (returnval){ [ 618.913174] env[69328]: value = "task-3272745" [ 618.913174] env[69328]: _type = "Task" [ 618.913174] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.924649] env[69328]: DEBUG nova.compute.provider_tree [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 618.931431] env[69328]: DEBUG oslo_vmware.api [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Task: {'id': task-3272745, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.983944] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e22be9cc-836e-4e91-b58f-4d11cbfc8247 tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Acquiring lock "d97dc6d5-e55f-4b9e-91e6-cfdea82f5236" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 618.984283] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e22be9cc-836e-4e91-b58f-4d11cbfc8247 tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Lock "d97dc6d5-e55f-4b9e-91e6-cfdea82f5236" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 618.984630] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e22be9cc-836e-4e91-b58f-4d11cbfc8247 tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Acquiring lock "d97dc6d5-e55f-4b9e-91e6-cfdea82f5236-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 618.984717] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e22be9cc-836e-4e91-b58f-4d11cbfc8247 tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Lock "d97dc6d5-e55f-4b9e-91e6-cfdea82f5236-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 618.984872] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e22be9cc-836e-4e91-b58f-4d11cbfc8247 tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Lock "d97dc6d5-e55f-4b9e-91e6-cfdea82f5236-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 618.987130] env[69328]: INFO nova.compute.manager [None req-e22be9cc-836e-4e91-b58f-4d11cbfc8247 tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Terminating instance [ 619.037798] env[69328]: DEBUG nova.compute.manager [req-3e36a577-fab7-437d-992c-b8435970143c req-e6712483-23f7-480c-a23d-b776b11a933e service nova] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Received event network-vif-deleted-d79088e2-4f2b-49c3-bb85-8d8c7c108a8e {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 619.083210] env[69328]: DEBUG oslo_vmware.api [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': task-3272740, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.174261] env[69328]: DEBUG nova.compute.manager [None req-d6a49c30-7556-4612-8fbd-a35acb80e9bd tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 619.174261] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d6a49c30-7556-4612-8fbd-a35acb80e9bd tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 619.175328] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a09aa1a0-6b61-4446-8a7b-efde246f93fb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.191163] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6a49c30-7556-4612-8fbd-a35acb80e9bd tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 619.191667] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7a4ec9f0-02cd-4c49-885f-c365408ea26f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.201248] env[69328]: DEBUG oslo_vmware.api [None req-d6a49c30-7556-4612-8fbd-a35acb80e9bd tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 619.201248] env[69328]: value = "task-3272746" [ 619.201248] env[69328]: _type = "Task" [ 619.201248] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.217623] env[69328]: DEBUG oslo_vmware.api [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': task-3272743, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.221184] env[69328]: DEBUG oslo_vmware.api [None req-d6a49c30-7556-4612-8fbd-a35acb80e9bd tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3272746, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.425754] env[69328]: DEBUG oslo_vmware.api [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Task: {'id': task-3272745, 'name': ReconfigVM_Task, 'duration_secs': 0.306561} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.426641] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Reconfigured VM instance instance-0000000b to attach disk [datastore1] a798c3f2-ccde-488e-8a14-21f4a04f8e12/a798c3f2-ccde-488e-8a14-21f4a04f8e12.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 619.428121] env[69328]: DEBUG nova.scheduler.client.report [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 619.431851] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4ca12492-59eb-4984-9197-524d6472ecf9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.444711] env[69328]: DEBUG oslo_vmware.api [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Waiting for the task: (returnval){ [ 619.444711] env[69328]: value = "task-3272747" [ 619.444711] env[69328]: _type = "Task" [ 619.444711] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.460366] env[69328]: DEBUG oslo_vmware.api [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Task: {'id': task-3272747, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.495129] env[69328]: DEBUG nova.compute.manager [None req-e22be9cc-836e-4e91-b58f-4d11cbfc8247 tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 619.495129] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e22be9cc-836e-4e91-b58f-4d11cbfc8247 tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 619.496146] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95774b3a-84ab-4813-87b6-39efafcdc3db {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.507840] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e22be9cc-836e-4e91-b58f-4d11cbfc8247 tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 619.507840] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4fe951ac-b9f4-4f0b-92e9-2325ab5c5e4b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.516046] env[69328]: DEBUG oslo_vmware.api [None req-e22be9cc-836e-4e91-b58f-4d11cbfc8247 tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Waiting for the task: (returnval){ [ 619.516046] env[69328]: value = "task-3272748" [ 619.516046] env[69328]: _type = "Task" [ 619.516046] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.526474] env[69328]: DEBUG oslo_vmware.api [None req-e22be9cc-836e-4e91-b58f-4d11cbfc8247 tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Task: {'id': task-3272748, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.587720] env[69328]: DEBUG oslo_vmware.api [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': task-3272740, 'name': PowerOnVM_Task, 'duration_secs': 2.155878} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.587720] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 619.587720] env[69328]: INFO nova.compute.manager [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Took 12.46 seconds to spawn the instance on the hypervisor. 
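Editor's note: the "Inventory has not changed ... based on inventory data" entries above report, per resource class, the total, reserved amount and allocation_ratio that placement uses to size this node. A minimal sketch of that arithmetic, applied to the figures logged above with the standard placement formula capacity = (total - reserved) * allocation_ratio; the dictionary literal is copied from the log entry, the loop itself is illustrative only.

# Illustrative only: recompute schedulable capacity from the inventory
# reported above, using capacity = (total - reserved) * allocation_ratio.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {capacity:g} schedulable")
# -> VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400

Note that max_unit in the logged inventory (16 VCPU, 65530 MB, 116 GB) still caps what a single instance may claim, independent of the overcommitted totals.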
[ 619.587720] env[69328]: DEBUG nova.compute.manager [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 619.588805] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a386dea0-d68c-4eae-a610-ff9fa0e515fb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.657081] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Acquiring lock "84baf472-6eb5-4c92-98eb-e35c14bca4e2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 619.657462] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Lock "84baf472-6eb5-4c92-98eb-e35c14bca4e2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 619.672613] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "bc9c3a41-7264-4d69-bc15-397b5fa0a8ad" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 619.672834] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "bc9c3a41-7264-4d69-bc15-397b5fa0a8ad" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 619.714162] env[69328]: DEBUG oslo_vmware.api [None req-d6a49c30-7556-4612-8fbd-a35acb80e9bd tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3272746, 'name': PowerOffVM_Task, 'duration_secs': 0.297436} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.714877] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6a49c30-7556-4612-8fbd-a35acb80e9bd tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 619.715133] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d6a49c30-7556-4612-8fbd-a35acb80e9bd tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 619.715428] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2c288ce2-670c-4174-aa45-7c7a70385803 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.720146] env[69328]: DEBUG oslo_vmware.api [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': task-3272743, 'name': PowerOnVM_Task, 'duration_secs': 1.080401} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.720445] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 619.720686] env[69328]: INFO nova.compute.manager [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Took 7.41 seconds to spawn the instance on the hypervisor. 
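Editor's note: most of the vCenter activity above follows one pattern: a *_Task operation (PowerOnVM_Task, PowerOffVM_Task, ReconfigVM_Task, Rename_Task, CopyVirtualDisk_Task) is invoked, then wait_for_task/_poll_task re-reads the task state and logs "progress is N%" until it logs "completed successfully". Below is a self-contained, schematic sketch of that polling loop; fetch_task_info and the simulated task are hypothetical stand-ins, not the oslo.vmware implementation.

# Schematic sketch of the wait_for_task/_poll_task loop seen in the log.
import itertools
import time

def make_fake_task():
    # Hypothetical stand-in for a vCenter task whose progress advances per poll.
    progress = itertools.chain([0, 33, 66], itertools.repeat(100))
    def fetch_task_info():
        p = next(progress)
        return {'state': 'success' if p >= 100 else 'running', 'progress': p}
    return fetch_task_info

def wait_for_task(fetch_task_info, poll_interval=0.5):
    # Poll until the task reports success or error, logging progress,
    # mirroring the "progress is N%" / "completed successfully" lines above.
    while True:
        info = fetch_task_info()
        if info['state'] == 'success':
            print('completed successfully')
            return info
        if info['state'] == 'error':
            raise RuntimeError('task failed')
        print(f"progress is {info['progress']}%")
        time.sleep(poll_interval)

wait_for_task(make_fake_task())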
[ 619.720902] env[69328]: DEBUG nova.compute.manager [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 619.721715] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06ed3af8-ec9b-4eab-8289-b8a603b3fe3b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.795784] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d6a49c30-7556-4612-8fbd-a35acb80e9bd tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 619.796041] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d6a49c30-7556-4612-8fbd-a35acb80e9bd tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 619.796234] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6a49c30-7556-4612-8fbd-a35acb80e9bd tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Deleting the datastore file [datastore1] ed10d511-dbed-4884-8ac6-f737173f62c5 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 619.796795] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e6af56c1-7aa9-41b3-80fe-84b3d83bb50a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.804610] env[69328]: DEBUG oslo_vmware.api [None req-d6a49c30-7556-4612-8fbd-a35acb80e9bd tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 619.804610] env[69328]: value = "task-3272750" [ 619.804610] env[69328]: _type = "Task" [ 619.804610] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.814807] env[69328]: DEBUG oslo_vmware.api [None req-d6a49c30-7556-4612-8fbd-a35acb80e9bd tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3272750, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.939670] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.474s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 619.940291] env[69328]: DEBUG nova.compute.manager [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 619.942949] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.889s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 619.944402] env[69328]: INFO nova.compute.claims [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 619.957973] env[69328]: DEBUG oslo_vmware.api [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Task: {'id': task-3272747, 'name': Rename_Task, 'duration_secs': 0.165626} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.958217] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 619.958471] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e154c7f9-49e7-4925-9c1b-e13807db3dbe {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.967649] env[69328]: DEBUG oslo_vmware.api [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Waiting for the task: (returnval){ [ 619.967649] env[69328]: value = "task-3272751" [ 619.967649] env[69328]: _type = "Task" [ 619.967649] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.975886] env[69328]: DEBUG oslo_vmware.api [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Task: {'id': task-3272751, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.024770] env[69328]: DEBUG oslo_vmware.api [None req-e22be9cc-836e-4e91-b58f-4d11cbfc8247 tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Task: {'id': task-3272748, 'name': PowerOffVM_Task, 'duration_secs': 0.347839} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.025039] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e22be9cc-836e-4e91-b58f-4d11cbfc8247 tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 620.025204] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e22be9cc-836e-4e91-b58f-4d11cbfc8247 tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 620.025448] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-28339f72-0694-443d-bf94-60fa33ea9a40 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.112675] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e22be9cc-836e-4e91-b58f-4d11cbfc8247 tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 620.115152] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e22be9cc-836e-4e91-b58f-4d11cbfc8247 tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 620.115152] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-e22be9cc-836e-4e91-b58f-4d11cbfc8247 tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Deleting the datastore file [datastore1] d97dc6d5-e55f-4b9e-91e6-cfdea82f5236 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 620.115803] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a4ecf68c-6f01-4230-b652-ef7c88733ce4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.118985] env[69328]: INFO nova.compute.manager [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Took 32.68 seconds to build instance. [ 620.125751] env[69328]: DEBUG oslo_vmware.api [None req-e22be9cc-836e-4e91-b58f-4d11cbfc8247 tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Waiting for the task: (returnval){ [ 620.125751] env[69328]: value = "task-3272753" [ 620.125751] env[69328]: _type = "Task" [ 620.125751] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.133707] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Acquiring lock "bbbfb48d-b474-4a6e-9078-336f23d2c343" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 620.134036] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Lock "bbbfb48d-b474-4a6e-9078-336f23d2c343" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 620.140034] env[69328]: DEBUG oslo_vmware.api [None req-e22be9cc-836e-4e91-b58f-4d11cbfc8247 tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Task: {'id': task-3272753, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.241026] env[69328]: INFO nova.compute.manager [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Took 16.85 seconds to build instance. [ 620.248136] env[69328]: DEBUG nova.compute.manager [req-3197586f-7686-4a38-934c-5bbaedde3a7f req-c260166a-551e-412b-b8e7-44a6329dc5fc service nova] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Received event network-vif-deleted-03adda47-e195-413d-85d7-5fd0c5a5027b {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 620.249031] env[69328]: DEBUG nova.compute.manager [req-3197586f-7686-4a38-934c-5bbaedde3a7f req-c260166a-551e-412b-b8e7-44a6329dc5fc service nova] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Received event network-changed-f00ec7b2-0d01-4e8c-b30b-50314520c094 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 620.249031] env[69328]: DEBUG nova.compute.manager [req-3197586f-7686-4a38-934c-5bbaedde3a7f req-c260166a-551e-412b-b8e7-44a6329dc5fc service nova] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Refreshing instance network info cache due to event network-changed-f00ec7b2-0d01-4e8c-b30b-50314520c094. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 620.249031] env[69328]: DEBUG oslo_concurrency.lockutils [req-3197586f-7686-4a38-934c-5bbaedde3a7f req-c260166a-551e-412b-b8e7-44a6329dc5fc service nova] Acquiring lock "refresh_cache-88f9f0c2-0c55-45bf-a494-8f1ee4922443" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 620.249425] env[69328]: DEBUG oslo_concurrency.lockutils [req-3197586f-7686-4a38-934c-5bbaedde3a7f req-c260166a-551e-412b-b8e7-44a6329dc5fc service nova] Acquired lock "refresh_cache-88f9f0c2-0c55-45bf-a494-8f1ee4922443" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 620.249773] env[69328]: DEBUG nova.network.neutron [req-3197586f-7686-4a38-934c-5bbaedde3a7f req-c260166a-551e-412b-b8e7-44a6329dc5fc service nova] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Refreshing network info cache for port f00ec7b2-0d01-4e8c-b30b-50314520c094 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 620.316994] env[69328]: DEBUG oslo_vmware.api [None req-d6a49c30-7556-4612-8fbd-a35acb80e9bd tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3272750, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.290139} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.317397] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6a49c30-7556-4612-8fbd-a35acb80e9bd tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 620.317774] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d6a49c30-7556-4612-8fbd-a35acb80e9bd tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 620.317915] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d6a49c30-7556-4612-8fbd-a35acb80e9bd tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 620.318225] env[69328]: INFO nova.compute.manager [None req-d6a49c30-7556-4612-8fbd-a35acb80e9bd tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Took 1.14 seconds to destroy the instance on the hypervisor. [ 620.318620] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d6a49c30-7556-4612-8fbd-a35acb80e9bd tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 620.318955] env[69328]: DEBUG nova.compute.manager [-] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 620.319149] env[69328]: DEBUG nova.network.neutron [-] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 620.450383] env[69328]: DEBUG nova.compute.utils [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 620.457039] env[69328]: DEBUG nova.compute.manager [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 620.457039] env[69328]: DEBUG nova.network.neutron [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 620.481468] env[69328]: DEBUG oslo_vmware.api [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Task: {'id': task-3272751, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.578668] env[69328]: DEBUG nova.policy [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7a2d2283c9bd411fa022996cd7e965d8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'baf187118a5148ff872d234776a2db47', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 620.623660] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8ba064f7-ddaa-448f-bfaa-0439ff6b3ba8 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Lock "d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.194s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 620.637742] env[69328]: DEBUG oslo_vmware.api [None req-e22be9cc-836e-4e91-b58f-4d11cbfc8247 tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Task: {'id': task-3272753, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.217882} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.639259] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-e22be9cc-836e-4e91-b58f-4d11cbfc8247 tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 620.639375] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e22be9cc-836e-4e91-b58f-4d11cbfc8247 tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 620.639623] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e22be9cc-836e-4e91-b58f-4d11cbfc8247 tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 620.639842] env[69328]: INFO nova.compute.manager [None req-e22be9cc-836e-4e91-b58f-4d11cbfc8247 tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Took 1.14 seconds to destroy the instance on the hypervisor. [ 620.640167] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e22be9cc-836e-4e91-b58f-4d11cbfc8247 tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 620.641417] env[69328]: DEBUG nova.compute.manager [-] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 620.641713] env[69328]: DEBUG nova.network.neutron [-] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 620.744191] env[69328]: DEBUG oslo_concurrency.lockutils [None req-30ebce40-b55b-4025-b33e-14d5221bd01f tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Lock "26feb2d1-ff64-4a13-af83-b6d5fe4348e1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.099s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 620.955973] env[69328]: DEBUG nova.compute.manager [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 620.991592] env[69328]: DEBUG oslo_vmware.api [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Task: {'id': task-3272751, 'name': PowerOnVM_Task, 'duration_secs': 0.641911} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.992399] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 620.993133] env[69328]: INFO nova.compute.manager [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Took 11.26 seconds to spawn the instance on the hypervisor. [ 620.993133] env[69328]: DEBUG nova.compute.manager [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 620.993825] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db9669be-1c52-415f-a9da-213ed2a475f7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.128101] env[69328]: DEBUG nova.compute.manager [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 621.247456] env[69328]: DEBUG nova.compute.manager [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 621.302220] env[69328]: DEBUG nova.network.neutron [req-3197586f-7686-4a38-934c-5bbaedde3a7f req-c260166a-551e-412b-b8e7-44a6329dc5fc service nova] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Updated VIF entry in instance network info cache for port f00ec7b2-0d01-4e8c-b30b-50314520c094. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 621.303394] env[69328]: DEBUG nova.network.neutron [req-3197586f-7686-4a38-934c-5bbaedde3a7f req-c260166a-551e-412b-b8e7-44a6329dc5fc service nova] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Updating instance_info_cache with network_info: [{"id": "f00ec7b2-0d01-4e8c-b30b-50314520c094", "address": "fa:16:3e:4c:b2:68", "network": {"id": "a1617aa6-27e2-4648-ad66-bff29c8d3d2a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-897974516-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2da97117081d44cab074540e0b39d0e5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98f447de-d71e-41ef-bc37-ed97b4a1f58f", "external-id": "nsx-vlan-transportzone-904", "segmentation_id": 904, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf00ec7b2-0d", "ovs_interfaceid": "f00ec7b2-0d01-4e8c-b30b-50314520c094", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 621.320411] env[69328]: DEBUG nova.network.neutron [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Successfully created port: c9854116-a48f-47e7-8b3e-7a2a04699cc8 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 621.504030] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34ff76bf-7940-4e87-88b1-1e29798a0b69 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.522160] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e86298dd-57bf-4a51-a019-c9ea6cd136cc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.527959] env[69328]: INFO nova.compute.manager [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Took 18.75 seconds to build instance. 
[ 621.573990] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f011edfb-cf39-4e66-a35c-ab53ff7c612b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.584477] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-949171f5-3f3e-4daa-9403-d446e985b35c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.603618] env[69328]: DEBUG nova.compute.provider_tree [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 621.614225] env[69328]: INFO nova.compute.manager [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Rebuilding instance [ 621.667020] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 621.687036] env[69328]: DEBUG nova.compute.manager [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 621.687036] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed84dec3-370c-4568-a250-f058f2005a2b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.708840] env[69328]: DEBUG nova.network.neutron [-] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 621.787139] env[69328]: DEBUG oslo_concurrency.lockutils [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 621.809406] env[69328]: DEBUG oslo_concurrency.lockutils [req-3197586f-7686-4a38-934c-5bbaedde3a7f req-c260166a-551e-412b-b8e7-44a6329dc5fc service nova] Releasing lock "refresh_cache-88f9f0c2-0c55-45bf-a494-8f1ee4922443" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 621.878550] env[69328]: DEBUG nova.network.neutron [-] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 621.970927] env[69328]: DEBUG nova.compute.manager [None 
req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 622.000613] env[69328]: DEBUG nova.virt.hardware [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 622.000936] env[69328]: DEBUG nova.virt.hardware [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 622.001063] env[69328]: DEBUG nova.virt.hardware [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 622.001221] env[69328]: DEBUG nova.virt.hardware [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 622.001345] env[69328]: DEBUG nova.virt.hardware [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 622.001493] env[69328]: DEBUG nova.virt.hardware [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 622.001705] env[69328]: DEBUG nova.virt.hardware [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 622.001891] env[69328]: DEBUG nova.virt.hardware [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 
tempest-ServerAddressesTestJSON-803335342-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 622.002204] env[69328]: DEBUG nova.virt.hardware [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 622.002367] env[69328]: DEBUG nova.virt.hardware [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 622.002546] env[69328]: DEBUG nova.virt.hardware [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 622.004471] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92ce2921-4cd0-4ef5-aa01-66b08c2139a2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.013788] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6273a860-b33c-46ff-8b03-52e088c57ab8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.032670] env[69328]: DEBUG oslo_concurrency.lockutils [None req-17e3fe22-8fc8-48f1-854e-d3612a1581ad tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Lock "a798c3f2-ccde-488e-8a14-21f4a04f8e12" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.598s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 622.108446] env[69328]: DEBUG nova.scheduler.client.report [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 622.134507] env[69328]: DEBUG nova.compute.manager [req-e5065fca-f068-4de5-aae2-8c57f4327e92 req-b91c0ce3-2298-4a26-be6d-81174be5f4f3 service nova] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Received event network-vif-deleted-742f3021-311f-4b36-9507-03a493f2b49f {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 622.208774] env[69328]: INFO nova.compute.manager [-] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Took 1.89 seconds to deallocate network for instance. 
[ 622.387653] env[69328]: INFO nova.compute.manager [-] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Took 1.75 seconds to deallocate network for instance. [ 622.535795] env[69328]: DEBUG nova.compute.manager [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 622.620629] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.677s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 622.621666] env[69328]: DEBUG nova.compute.manager [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 622.624595] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.843s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 622.626455] env[69328]: INFO nova.compute.claims [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 622.714725] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 622.717883] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b83d15b4-6b3e-4218-b13b-0047b695c0c7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.718313] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d6a49c30-7556-4612-8fbd-a35acb80e9bd tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 622.728790] env[69328]: DEBUG oslo_vmware.api [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Waiting for the task: (returnval){ [ 622.728790] env[69328]: value = "task-3272754" [ 622.728790] env[69328]: _type = "Task" [ 622.728790] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.744214] env[69328]: DEBUG oslo_vmware.api [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': task-3272754, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.746955] env[69328]: DEBUG nova.compute.manager [req-2b2b266c-4b5e-4040-a0d3-58f408d6b83c req-cbe40fc6-6db5-42fd-ad10-dee27a4ae0d6 service nova] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Received event network-changed-09c4fb65-f87f-4fdc-9a85-cf73224a3ca3 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 622.747177] env[69328]: DEBUG nova.compute.manager [req-2b2b266c-4b5e-4040-a0d3-58f408d6b83c req-cbe40fc6-6db5-42fd-ad10-dee27a4ae0d6 service nova] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Refreshing instance network info cache due to event network-changed-09c4fb65-f87f-4fdc-9a85-cf73224a3ca3. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 622.747460] env[69328]: DEBUG oslo_concurrency.lockutils [req-2b2b266c-4b5e-4040-a0d3-58f408d6b83c req-cbe40fc6-6db5-42fd-ad10-dee27a4ae0d6 service nova] Acquiring lock "refresh_cache-d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.747547] env[69328]: DEBUG oslo_concurrency.lockutils [req-2b2b266c-4b5e-4040-a0d3-58f408d6b83c req-cbe40fc6-6db5-42fd-ad10-dee27a4ae0d6 service nova] Acquired lock "refresh_cache-d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 622.747703] env[69328]: DEBUG nova.network.neutron [req-2b2b266c-4b5e-4040-a0d3-58f408d6b83c req-cbe40fc6-6db5-42fd-ad10-dee27a4ae0d6 service nova] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Refreshing network info cache for port 09c4fb65-f87f-4fdc-9a85-cf73224a3ca3 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 622.896844] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e22be9cc-836e-4e91-b58f-4d11cbfc8247 tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 623.077338] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 623.133441] env[69328]: DEBUG nova.compute.utils [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 623.137528] env[69328]: DEBUG nova.compute.manager [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 
tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 623.137729] env[69328]: DEBUG nova.network.neutron [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 623.246717] env[69328]: DEBUG oslo_vmware.api [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': task-3272754, 'name': PowerOffVM_Task, 'duration_secs': 0.134043} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.248522] env[69328]: DEBUG nova.policy [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'adea8f3e148442e691f99af03f894a4c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1467d48a61f7410b8f6d5a981d169563', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 623.250920] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 623.250920] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 623.251361] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da092305-e7ae-4070-ae81-31049cc64be6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.264074] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 623.264594] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-10472af0-7938-4cd9-a5b3-7e0cf7ea74d1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.301847] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] 
[instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 623.302505] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 623.303125] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Deleting the datastore file [datastore1] 26feb2d1-ff64-4a13-af83-b6d5fe4348e1 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 623.303932] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-15c837ca-05df-4710-a45d-2fbb154b4baa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.315282] env[69328]: DEBUG oslo_vmware.api [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Waiting for the task: (returnval){ [ 623.315282] env[69328]: value = "task-3272756" [ 623.315282] env[69328]: _type = "Task" [ 623.315282] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.337666] env[69328]: DEBUG oslo_vmware.api [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': task-3272756, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.558126] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0ea93919-e76c-479d-8b58-9211c6c69435 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Acquiring lock "88f9f0c2-0c55-45bf-a494-8f1ee4922443" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 623.558467] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0ea93919-e76c-479d-8b58-9211c6c69435 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Lock "88f9f0c2-0c55-45bf-a494-8f1ee4922443" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 623.558731] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0ea93919-e76c-479d-8b58-9211c6c69435 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Acquiring lock "88f9f0c2-0c55-45bf-a494-8f1ee4922443-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 623.558985] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0ea93919-e76c-479d-8b58-9211c6c69435 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Lock "88f9f0c2-0c55-45bf-a494-8f1ee4922443-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 623.559227] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0ea93919-e76c-479d-8b58-9211c6c69435 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Lock "88f9f0c2-0c55-45bf-a494-8f1ee4922443-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 623.564642] env[69328]: INFO nova.compute.manager [None req-0ea93919-e76c-479d-8b58-9211c6c69435 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Terminating instance [ 623.616423] env[69328]: DEBUG nova.network.neutron [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Successfully updated port: c9854116-a48f-47e7-8b3e-7a2a04699cc8 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 623.643113] env[69328]: DEBUG nova.compute.manager [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Start building block device mappings 
for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 623.833183] env[69328]: DEBUG oslo_vmware.api [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': task-3272756, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.193507} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.835662] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 623.836057] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 623.836187] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 623.936211] env[69328]: DEBUG nova.network.neutron [req-2b2b266c-4b5e-4040-a0d3-58f408d6b83c req-cbe40fc6-6db5-42fd-ad10-dee27a4ae0d6 service nova] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Updated VIF entry in instance network info cache for port 09c4fb65-f87f-4fdc-9a85-cf73224a3ca3. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 623.936760] env[69328]: DEBUG nova.network.neutron [req-2b2b266c-4b5e-4040-a0d3-58f408d6b83c req-cbe40fc6-6db5-42fd-ad10-dee27a4ae0d6 service nova] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Updating instance_info_cache with network_info: [{"id": "09c4fb65-f87f-4fdc-9a85-cf73224a3ca3", "address": "fa:16:3e:e6:08:a2", "network": {"id": "b7b15f77-0584-4f19-a05e-67df3efe1b9d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-778653716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8502178b3d334c338b63dfde3eae8f08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d275d7c6-2a7b-4ee8-b6f4-fabf1ba1905f", "external-id": "nsx-vlan-transportzone-513", "segmentation_id": 513, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09c4fb65-f8", "ovs_interfaceid": "09c4fb65-f87f-4fdc-9a85-cf73224a3ca3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 623.991616] env[69328]: DEBUG nova.network.neutron [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Successfully created port: e401a888-b320-4f5f-bcdc-5d8c86b99ce7 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 624.068814] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0ceca4e-4a0b-438b-8356-3b349db5c1a9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.073193] env[69328]: DEBUG nova.compute.manager [None req-0ea93919-e76c-479d-8b58-9211c6c69435 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 624.073636] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0ea93919-e76c-479d-8b58-9211c6c69435 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 624.075182] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36371e6e-c717-4ae4-b7e3-6f0b78d01e50 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.087530] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b0be7c8-545c-46c8-988f-3144b560807c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.091966] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ea93919-e76c-479d-8b58-9211c6c69435 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 624.092635] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7794d6b7-4f7a-4d16-bd03-501ad35ab51e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.130924] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Acquiring lock "refresh_cache-edb1a21a-6907-4198-a977-c1213e8fecc0" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 624.131142] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Acquired lock "refresh_cache-edb1a21a-6907-4198-a977-c1213e8fecc0" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 624.131321] env[69328]: DEBUG nova.network.neutron [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 624.134891] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-483ce509-dd3c-4abe-842f-679287ecdeb5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.138336] env[69328]: DEBUG oslo_vmware.api [None req-0ea93919-e76c-479d-8b58-9211c6c69435 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Waiting for the task: (returnval){ [ 624.138336] env[69328]: value = "task-3272757" [ 624.138336] env[69328]: _type = "Task" [ 624.138336] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.149636] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28595633-6bd9-41e5-b3a4-5ed5426d0917 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.164024] env[69328]: DEBUG oslo_vmware.api [None req-0ea93919-e76c-479d-8b58-9211c6c69435 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Task: {'id': task-3272757, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.172991] env[69328]: DEBUG nova.compute.provider_tree [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 624.439182] env[69328]: DEBUG oslo_concurrency.lockutils [req-2b2b266c-4b5e-4040-a0d3-58f408d6b83c req-cbe40fc6-6db5-42fd-ad10-dee27a4ae0d6 service nova] Releasing lock "refresh_cache-d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 624.664217] env[69328]: DEBUG nova.compute.manager [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 624.667654] env[69328]: DEBUG oslo_vmware.api [None req-0ea93919-e76c-479d-8b58-9211c6c69435 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Task: {'id': task-3272757, 'name': PowerOffVM_Task, 'duration_secs': 0.52006} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.668168] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ea93919-e76c-479d-8b58-9211c6c69435 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 624.672919] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0ea93919-e76c-479d-8b58-9211c6c69435 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 624.673237] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b6a3bfba-1caf-4804-ab9d-e33b838410a6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.678796] env[69328]: DEBUG nova.scheduler.client.report [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 624.703153] env[69328]: DEBUG nova.virt.hardware [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:35:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1218380119',id=28,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-1650243157',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 624.703153] env[69328]: DEBUG nova.virt.hardware [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 624.703153] env[69328]: DEBUG nova.virt.hardware [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 
tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 624.703345] env[69328]: DEBUG nova.virt.hardware [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 624.706408] env[69328]: DEBUG nova.virt.hardware [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 624.706408] env[69328]: DEBUG nova.virt.hardware [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 624.706408] env[69328]: DEBUG nova.virt.hardware [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 624.706408] env[69328]: DEBUG nova.virt.hardware [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 624.706408] env[69328]: DEBUG nova.virt.hardware [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 624.706738] env[69328]: DEBUG nova.virt.hardware [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 624.706738] env[69328]: DEBUG nova.virt.hardware [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 624.706738] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c4aff04-0af0-4c15-846b-a77a5a086506 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.711412] env[69328]: DEBUG nova.network.neutron [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 
tempest-ServerAddressesTestJSON-803335342-project-member] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 624.724076] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c00ec91d-c44f-4273-8db1-010083189b2b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.749519] env[69328]: DEBUG nova.compute.manager [req-574fa5e3-4b46-41ae-b281-f14f779fadaf req-89de2662-25ef-47ac-8905-241f37682d19 service nova] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Received event network-vif-deleted-b801ae0c-2061-4103-8530-3d58f8785333 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 624.764311] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0ea93919-e76c-479d-8b58-9211c6c69435 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 624.764631] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0ea93919-e76c-479d-8b58-9211c6c69435 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 624.764813] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ea93919-e76c-479d-8b58-9211c6c69435 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Deleting the datastore file [datastore2] 88f9f0c2-0c55-45bf-a494-8f1ee4922443 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 624.765440] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eed8440a-e53c-4182-9d70-476531d3255d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.775309] env[69328]: DEBUG oslo_vmware.api [None req-0ea93919-e76c-479d-8b58-9211c6c69435 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Waiting for the task: (returnval){ [ 624.775309] env[69328]: value = "task-3272759" [ 624.775309] env[69328]: _type = "Task" [ 624.775309] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.790344] env[69328]: DEBUG oslo_vmware.api [None req-0ea93919-e76c-479d-8b58-9211c6c69435 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Task: {'id': task-3272759, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.881121] env[69328]: DEBUG nova.virt.hardware [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 624.881121] env[69328]: DEBUG nova.virt.hardware [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 624.881121] env[69328]: DEBUG nova.virt.hardware [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 624.881121] env[69328]: DEBUG nova.virt.hardware [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 624.881599] env[69328]: DEBUG nova.virt.hardware [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 624.881599] env[69328]: DEBUG nova.virt.hardware [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 624.881599] env[69328]: DEBUG nova.virt.hardware [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 624.881599] env[69328]: DEBUG nova.virt.hardware [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 624.881739] env[69328]: DEBUG nova.virt.hardware [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 
tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 624.881862] env[69328]: DEBUG nova.virt.hardware [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 624.882044] env[69328]: DEBUG nova.virt.hardware [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 624.883610] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d86dac-3bed-4af1-a421-a5d3a0007f1d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.893966] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d391854f-d207-4528-9aec-5010419f71ea {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.912826] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Instance VIF info [] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 624.919174] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 624.919439] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 624.919916] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cb75f23c-f4a3-4221-bd56-e4c98058c5e2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.939169] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 624.939169] env[69328]: value = "task-3272760" [ 624.939169] env[69328]: _type = "Task" [ 624.939169] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.950568] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272760, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.186904] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.562s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 625.186904] env[69328]: DEBUG nova.compute.manager [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 625.190568] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.956s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 625.192519] env[69328]: INFO nova.compute.claims [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 625.233215] env[69328]: DEBUG nova.network.neutron [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Updating instance_info_cache with network_info: [{"id": "c9854116-a48f-47e7-8b3e-7a2a04699cc8", "address": "fa:16:3e:36:39:58", "network": {"id": "476fe2b5-7e7f-4e61-ae35-6ea0ae8e3cb8", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-755380080-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "baf187118a5148ff872d234776a2db47", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bf86b133-2b7b-4cab-8f6f-5a0856d34c7b", "external-id": "nsx-vlan-transportzone-557", "segmentation_id": 557, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9854116-a4", "ovs_interfaceid": "c9854116-a48f-47e7-8b3e-7a2a04699cc8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.291109] env[69328]: DEBUG oslo_vmware.api [None req-0ea93919-e76c-479d-8b58-9211c6c69435 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Task: {'id': task-3272759, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.314985} 
completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.291420] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ea93919-e76c-479d-8b58-9211c6c69435 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 625.291609] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0ea93919-e76c-479d-8b58-9211c6c69435 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 625.291793] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0ea93919-e76c-479d-8b58-9211c6c69435 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 625.291940] env[69328]: INFO nova.compute.manager [None req-0ea93919-e76c-479d-8b58-9211c6c69435 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Took 1.22 seconds to destroy the instance on the hypervisor. [ 625.292195] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0ea93919-e76c-479d-8b58-9211c6c69435 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 625.292424] env[69328]: DEBUG nova.compute.manager [-] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 625.292460] env[69328]: DEBUG nova.network.neutron [-] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 625.363561] env[69328]: DEBUG nova.compute.manager [req-f3b6442c-9c51-4fb0-a0d7-7c0e7d4d1abb req-8a504a34-51ff-4e4e-b4d9-5658f8e258fb service nova] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Received event network-vif-plugged-c9854116-a48f-47e7-8b3e-7a2a04699cc8 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 625.364082] env[69328]: DEBUG oslo_concurrency.lockutils [req-f3b6442c-9c51-4fb0-a0d7-7c0e7d4d1abb req-8a504a34-51ff-4e4e-b4d9-5658f8e258fb service nova] Acquiring lock "edb1a21a-6907-4198-a977-c1213e8fecc0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 625.364082] env[69328]: DEBUG oslo_concurrency.lockutils [req-f3b6442c-9c51-4fb0-a0d7-7c0e7d4d1abb req-8a504a34-51ff-4e4e-b4d9-5658f8e258fb service nova] Lock "edb1a21a-6907-4198-a977-c1213e8fecc0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 625.364650] env[69328]: DEBUG oslo_concurrency.lockutils [req-f3b6442c-9c51-4fb0-a0d7-7c0e7d4d1abb req-8a504a34-51ff-4e4e-b4d9-5658f8e258fb service nova] Lock "edb1a21a-6907-4198-a977-c1213e8fecc0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 625.364650] env[69328]: DEBUG nova.compute.manager [req-f3b6442c-9c51-4fb0-a0d7-7c0e7d4d1abb req-8a504a34-51ff-4e4e-b4d9-5658f8e258fb service nova] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] No waiting events found dispatching network-vif-plugged-c9854116-a48f-47e7-8b3e-7a2a04699cc8 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 625.364650] env[69328]: WARNING nova.compute.manager [req-f3b6442c-9c51-4fb0-a0d7-7c0e7d4d1abb req-8a504a34-51ff-4e4e-b4d9-5658f8e258fb service nova] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Received unexpected event network-vif-plugged-c9854116-a48f-47e7-8b3e-7a2a04699cc8 for instance with vm_state building and task_state spawning. [ 625.364650] env[69328]: DEBUG nova.compute.manager [req-f3b6442c-9c51-4fb0-a0d7-7c0e7d4d1abb req-8a504a34-51ff-4e4e-b4d9-5658f8e258fb service nova] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Received event network-changed-09c4fb65-f87f-4fdc-9a85-cf73224a3ca3 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 625.364819] env[69328]: DEBUG nova.compute.manager [req-f3b6442c-9c51-4fb0-a0d7-7c0e7d4d1abb req-8a504a34-51ff-4e4e-b4d9-5658f8e258fb service nova] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Refreshing instance network info cache due to event network-changed-09c4fb65-f87f-4fdc-9a85-cf73224a3ca3. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 625.365548] env[69328]: DEBUG oslo_concurrency.lockutils [req-f3b6442c-9c51-4fb0-a0d7-7c0e7d4d1abb req-8a504a34-51ff-4e4e-b4d9-5658f8e258fb service nova] Acquiring lock "refresh_cache-d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.365548] env[69328]: DEBUG oslo_concurrency.lockutils [req-f3b6442c-9c51-4fb0-a0d7-7c0e7d4d1abb req-8a504a34-51ff-4e4e-b4d9-5658f8e258fb service nova] Acquired lock "refresh_cache-d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 625.365548] env[69328]: DEBUG nova.network.neutron [req-f3b6442c-9c51-4fb0-a0d7-7c0e7d4d1abb req-8a504a34-51ff-4e4e-b4d9-5658f8e258fb service nova] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Refreshing network info cache for port 09c4fb65-f87f-4fdc-9a85-cf73224a3ca3 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 625.453662] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272760, 'name': CreateVM_Task, 'duration_secs': 0.318114} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.453769] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 625.454331] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.454403] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 625.454657] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 625.454915] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa550b00-280f-4f84-969d-ea0684e7d2b8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.460533] env[69328]: DEBUG oslo_vmware.api [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Waiting for the task: (returnval){ [ 625.460533] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e47760-3e79-70ca-bbd9-806faa987398" [ 625.460533] env[69328]: _type = "Task" [ 625.460533] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.470339] env[69328]: DEBUG oslo_vmware.api [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e47760-3e79-70ca-bbd9-806faa987398, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.698893] env[69328]: DEBUG nova.compute.utils [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 625.707382] env[69328]: DEBUG nova.compute.manager [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Not allocating networking since 'none' was specified. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 625.738991] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Releasing lock "refresh_cache-edb1a21a-6907-4198-a977-c1213e8fecc0" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 625.739347] env[69328]: DEBUG nova.compute.manager [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Instance network_info: |[{"id": "c9854116-a48f-47e7-8b3e-7a2a04699cc8", "address": "fa:16:3e:36:39:58", "network": {"id": "476fe2b5-7e7f-4e61-ae35-6ea0ae8e3cb8", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-755380080-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "baf187118a5148ff872d234776a2db47", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bf86b133-2b7b-4cab-8f6f-5a0856d34c7b", "external-id": "nsx-vlan-transportzone-557", "segmentation_id": 557, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9854116-a4", "ovs_interfaceid": "c9854116-a48f-47e7-8b3e-7a2a04699cc8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 625.740559] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:36:39:58', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'bf86b133-2b7b-4cab-8f6f-5a0856d34c7b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c9854116-a48f-47e7-8b3e-7a2a04699cc8', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 625.752020] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Creating folder: Project (baf187118a5148ff872d234776a2db47). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 625.752020] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6d6f74fd-0eab-40fb-9e9d-f7e01b882c17 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.769331] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Created folder: Project (baf187118a5148ff872d234776a2db47) in parent group-v653649. [ 625.769535] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Creating folder: Instances. Parent ref: group-v653685. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 625.769790] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7cd655dc-44c2-46f2-8b29-90e5ce9802ff {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.782172] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Created folder: Instances in parent group-v653685. [ 625.782172] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 625.782172] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 625.782172] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-195194e0-bf43-485c-9242-6127c7fc88c6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.816491] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 625.816491] env[69328]: value = "task-3272763" [ 625.816491] env[69328]: _type = "Task" [ 625.816491] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.830799] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272763, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.985638] env[69328]: DEBUG oslo_vmware.api [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e47760-3e79-70ca-bbd9-806faa987398, 'name': SearchDatastore_Task, 'duration_secs': 0.024734} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.985638] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 625.985638] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 625.985638] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.987161] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 625.989270] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 625.989270] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-38cc59cb-21e0-48e2-9ab0-aea4a3cbb4f7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.006597] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 626.006872] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 626.007985] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0140b086-b2de-4501-9054-babb20977b63 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.018134] env[69328]: DEBUG oslo_vmware.api [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Waiting for the task: (returnval){ [ 626.018134] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52381def-2939-2f3e-4e63-d88b114b70d8" [ 626.018134] env[69328]: _type = "Task" [ 626.018134] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.031656] env[69328]: DEBUG oslo_vmware.api [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52381def-2939-2f3e-4e63-d88b114b70d8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.208433] env[69328]: DEBUG nova.compute.manager [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 626.336382] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272763, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.531471] env[69328]: DEBUG oslo_vmware.api [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52381def-2939-2f3e-4e63-d88b114b70d8, 'name': SearchDatastore_Task, 'duration_secs': 0.023182} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.532395] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69b5f72b-857c-4d0e-bba4-509316705db7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.543742] env[69328]: DEBUG oslo_vmware.api [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Waiting for the task: (returnval){ [ 626.543742] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5204a7d5-eab9-0c40-960e-54bb8da15f93" [ 626.543742] env[69328]: _type = "Task" [ 626.543742] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.550276] env[69328]: DEBUG nova.network.neutron [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Successfully updated port: e401a888-b320-4f5f-bcdc-5d8c86b99ce7 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 626.557720] env[69328]: DEBUG oslo_vmware.api [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5204a7d5-eab9-0c40-960e-54bb8da15f93, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.644854] env[69328]: DEBUG nova.network.neutron [-] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.666243] env[69328]: DEBUG nova.network.neutron [req-f3b6442c-9c51-4fb0-a0d7-7c0e7d4d1abb req-8a504a34-51ff-4e4e-b4d9-5658f8e258fb service nova] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Updated VIF entry in instance network info cache for port 09c4fb65-f87f-4fdc-9a85-cf73224a3ca3. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 626.666633] env[69328]: DEBUG nova.network.neutron [req-f3b6442c-9c51-4fb0-a0d7-7c0e7d4d1abb req-8a504a34-51ff-4e4e-b4d9-5658f8e258fb service nova] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Updating instance_info_cache with network_info: [{"id": "09c4fb65-f87f-4fdc-9a85-cf73224a3ca3", "address": "fa:16:3e:e6:08:a2", "network": {"id": "b7b15f77-0584-4f19-a05e-67df3efe1b9d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-778653716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8502178b3d334c338b63dfde3eae8f08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d275d7c6-2a7b-4ee8-b6f4-fabf1ba1905f", "external-id": "nsx-vlan-transportzone-513", "segmentation_id": 513, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09c4fb65-f8", "ovs_interfaceid": "09c4fb65-f87f-4fdc-9a85-cf73224a3ca3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.676856] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9d791b5-6f34-4c32-b7d6-7ce6c769c6d7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.688022] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5ae0f32-d9b0-4ee3-8c33-0d831931177f {{(pid=69328) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.732048] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24c91183-5030-431c-80ba-6fa4ba9edda1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.742050] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d6d97ac-1dd7-41f4-965e-ea955326dff3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.760154] env[69328]: DEBUG nova.compute.provider_tree [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 626.829705] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272763, 'name': CreateVM_Task, 'duration_secs': 0.552518} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.829889] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 626.830745] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.830944] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 626.831294] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 626.831645] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52e90fbc-c7b0-46c4-a679-6133776db97f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.840022] env[69328]: DEBUG oslo_vmware.api [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Waiting for the task: (returnval){ [ 626.840022] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ec18b9-7b8c-37d7-8eeb-5c00ab7a049e" [ 626.840022] env[69328]: _type = "Task" [ 626.840022] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.848488] env[69328]: DEBUG oslo_vmware.api [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ec18b9-7b8c-37d7-8eeb-5c00ab7a049e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.045292] env[69328]: DEBUG nova.compute.manager [req-c29559c4-4c9a-40d3-8bc5-55188906394b req-0efbc9b0-12db-4784-8f95-28836d12ce9c service nova] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Received event network-vif-plugged-e401a888-b320-4f5f-bcdc-5d8c86b99ce7 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 627.045747] env[69328]: DEBUG oslo_concurrency.lockutils [req-c29559c4-4c9a-40d3-8bc5-55188906394b req-0efbc9b0-12db-4784-8f95-28836d12ce9c service nova] Acquiring lock "bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 627.045882] env[69328]: DEBUG oslo_concurrency.lockutils [req-c29559c4-4c9a-40d3-8bc5-55188906394b req-0efbc9b0-12db-4784-8f95-28836d12ce9c service nova] Lock "bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 627.046821] env[69328]: DEBUG oslo_concurrency.lockutils [req-c29559c4-4c9a-40d3-8bc5-55188906394b req-0efbc9b0-12db-4784-8f95-28836d12ce9c service nova] Lock "bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 627.046821] env[69328]: DEBUG nova.compute.manager [req-c29559c4-4c9a-40d3-8bc5-55188906394b req-0efbc9b0-12db-4784-8f95-28836d12ce9c service nova] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] No waiting events found dispatching network-vif-plugged-e401a888-b320-4f5f-bcdc-5d8c86b99ce7 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 627.046821] env[69328]: WARNING nova.compute.manager [req-c29559c4-4c9a-40d3-8bc5-55188906394b req-0efbc9b0-12db-4784-8f95-28836d12ce9c service nova] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Received unexpected event network-vif-plugged-e401a888-b320-4f5f-bcdc-5d8c86b99ce7 for instance with vm_state building and task_state spawning. [ 627.046821] env[69328]: DEBUG nova.compute.manager [req-c29559c4-4c9a-40d3-8bc5-55188906394b req-0efbc9b0-12db-4784-8f95-28836d12ce9c service nova] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Received event network-changed-e401a888-b320-4f5f-bcdc-5d8c86b99ce7 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 627.047797] env[69328]: DEBUG nova.compute.manager [req-c29559c4-4c9a-40d3-8bc5-55188906394b req-0efbc9b0-12db-4784-8f95-28836d12ce9c service nova] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Refreshing instance network info cache due to event network-changed-e401a888-b320-4f5f-bcdc-5d8c86b99ce7. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 627.047797] env[69328]: DEBUG oslo_concurrency.lockutils [req-c29559c4-4c9a-40d3-8bc5-55188906394b req-0efbc9b0-12db-4784-8f95-28836d12ce9c service nova] Acquiring lock "refresh_cache-bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.047797] env[69328]: DEBUG oslo_concurrency.lockutils [req-c29559c4-4c9a-40d3-8bc5-55188906394b req-0efbc9b0-12db-4784-8f95-28836d12ce9c service nova] Acquired lock "refresh_cache-bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 627.047797] env[69328]: DEBUG nova.network.neutron [req-c29559c4-4c9a-40d3-8bc5-55188906394b req-0efbc9b0-12db-4784-8f95-28836d12ce9c service nova] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Refreshing network info cache for port e401a888-b320-4f5f-bcdc-5d8c86b99ce7 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 627.062155] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Acquiring lock "refresh_cache-bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.076811] env[69328]: DEBUG oslo_vmware.api [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5204a7d5-eab9-0c40-960e-54bb8da15f93, 'name': SearchDatastore_Task, 'duration_secs': 0.02422} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.077725] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 627.078036] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 26feb2d1-ff64-4a13-af83-b6d5fe4348e1/26feb2d1-ff64-4a13-af83-b6d5fe4348e1.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 627.078455] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e52b5786-c64e-4c23-99b4-b99a75e16516 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.094386] env[69328]: DEBUG oslo_vmware.api [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Waiting for the task: (returnval){ [ 627.094386] env[69328]: value = "task-3272764" [ 627.094386] env[69328]: _type = "Task" [ 627.094386] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.109588] env[69328]: DEBUG oslo_vmware.api [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': task-3272764, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.149982] env[69328]: INFO nova.compute.manager [-] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Took 1.86 seconds to deallocate network for instance. [ 627.171805] env[69328]: DEBUG oslo_concurrency.lockutils [req-f3b6442c-9c51-4fb0-a0d7-7c0e7d4d1abb req-8a504a34-51ff-4e4e-b4d9-5658f8e258fb service nova] Releasing lock "refresh_cache-d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 627.172352] env[69328]: DEBUG nova.compute.manager [req-f3b6442c-9c51-4fb0-a0d7-7c0e7d4d1abb req-8a504a34-51ff-4e4e-b4d9-5658f8e258fb service nova] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Received event network-changed-c9854116-a48f-47e7-8b3e-7a2a04699cc8 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 627.172352] env[69328]: DEBUG nova.compute.manager [req-f3b6442c-9c51-4fb0-a0d7-7c0e7d4d1abb req-8a504a34-51ff-4e4e-b4d9-5658f8e258fb service nova] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Refreshing instance network info cache due to event network-changed-c9854116-a48f-47e7-8b3e-7a2a04699cc8. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 627.172533] env[69328]: DEBUG oslo_concurrency.lockutils [req-f3b6442c-9c51-4fb0-a0d7-7c0e7d4d1abb req-8a504a34-51ff-4e4e-b4d9-5658f8e258fb service nova] Acquiring lock "refresh_cache-edb1a21a-6907-4198-a977-c1213e8fecc0" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.172750] env[69328]: DEBUG oslo_concurrency.lockutils [req-f3b6442c-9c51-4fb0-a0d7-7c0e7d4d1abb req-8a504a34-51ff-4e4e-b4d9-5658f8e258fb service nova] Acquired lock "refresh_cache-edb1a21a-6907-4198-a977-c1213e8fecc0" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 627.172750] env[69328]: DEBUG nova.network.neutron [req-f3b6442c-9c51-4fb0-a0d7-7c0e7d4d1abb req-8a504a34-51ff-4e4e-b4d9-5658f8e258fb service nova] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Refreshing network info cache for port c9854116-a48f-47e7-8b3e-7a2a04699cc8 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 627.236924] env[69328]: DEBUG nova.compute.manager [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 627.265475] env[69328]: DEBUG nova.scheduler.client.report [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 627.287881] env[69328]: DEBUG nova.virt.hardware [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 627.288199] env[69328]: DEBUG nova.virt.hardware [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 627.288363] env[69328]: DEBUG nova.virt.hardware [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 627.288569] env[69328]: DEBUG nova.virt.hardware [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 627.288785] env[69328]: DEBUG nova.virt.hardware [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 627.288995] env[69328]: DEBUG nova.virt.hardware [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 
627.289338] env[69328]: DEBUG nova.virt.hardware [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 627.289500] env[69328]: DEBUG nova.virt.hardware [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 627.289705] env[69328]: DEBUG nova.virt.hardware [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 627.289877] env[69328]: DEBUG nova.virt.hardware [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 627.290106] env[69328]: DEBUG nova.virt.hardware [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 627.294029] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71fec1d7-b2d7-4f26-bd15-37e8a6bce90c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.304677] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79d66647-a132-4706-93a7-261fdfd92765 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.323099] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Instance VIF info [] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 627.328798] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Creating folder: Project (ed8882e64d684e5abc0b043f4471f643). Parent ref: group-v653649. 
{{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 627.329601] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d4d38ffc-ded1-4ae5-afd4-a47cf39b476b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.352705] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Created folder: Project (ed8882e64d684e5abc0b043f4471f643) in parent group-v653649. [ 627.354247] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Creating folder: Instances. Parent ref: group-v653688. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 627.354247] env[69328]: DEBUG oslo_vmware.api [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ec18b9-7b8c-37d7-8eeb-5c00ab7a049e, 'name': SearchDatastore_Task, 'duration_secs': 0.025391} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.354247] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2a97cf82-f7e3-42d5-b845-c27230606051 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.356383] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 627.356383] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 627.356383] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.356383] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 627.356665] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 
tempest-ServerAddressesTestJSON-803335342-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 627.356665] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-58241036-589e-462d-a9f1-dc87cd431f34 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.375466] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Created folder: Instances in parent group-v653688. [ 627.375466] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 627.375466] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 627.375466] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-43145b85-b57e-4399-bf36-8ef5787384ca {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.399417] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 627.399618] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 627.403229] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5bc3f26-fd61-46f2-8092-1975d7241240 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.410401] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 627.410401] env[69328]: value = "task-3272767" [ 627.410401] env[69328]: _type = "Task" [ 627.410401] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.421754] env[69328]: DEBUG oslo_vmware.api [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Waiting for the task: (returnval){ [ 627.421754] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5235433c-0663-1d76-0159-9904ef9c9061" [ 627.421754] env[69328]: _type = "Task" [ 627.421754] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.433417] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272767, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.443175] env[69328]: DEBUG oslo_vmware.api [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5235433c-0663-1d76-0159-9904ef9c9061, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.609591] env[69328]: DEBUG oslo_vmware.api [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': task-3272764, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.630831] env[69328]: DEBUG nova.network.neutron [req-c29559c4-4c9a-40d3-8bc5-55188906394b req-0efbc9b0-12db-4784-8f95-28836d12ce9c service nova] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 627.659895] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0ea93919-e76c-479d-8b58-9211c6c69435 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 627.775499] env[69328]: DEBUG nova.compute.manager [req-0c84b86f-f6ea-404d-84b8-fe8aa0c5d01d req-5adaac34-ff43-4f57-81dd-cb869fd1021e service nova] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Received event network-vif-deleted-f00ec7b2-0d01-4e8c-b30b-50314520c094 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 627.781009] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.590s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 627.782042] env[69328]: DEBUG nova.compute.manager [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 627.786309] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aec69d4d-dbfb-4864-9814-8fe245f37e57 tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.346s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 627.789580] env[69328]: DEBUG nova.objects.instance [None req-aec69d4d-dbfb-4864-9814-8fe245f37e57 tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Lazy-loading 'resources' on Instance uuid 49a668a7-5967-46a9-823f-7f613d34d152 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 627.925907] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272767, 'name': CreateVM_Task} progress is 25%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.931086] env[69328]: DEBUG nova.network.neutron [req-c29559c4-4c9a-40d3-8bc5-55188906394b req-0efbc9b0-12db-4784-8f95-28836d12ce9c service nova] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 627.941176] env[69328]: DEBUG oslo_vmware.api [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5235433c-0663-1d76-0159-9904ef9c9061, 'name': SearchDatastore_Task, 'duration_secs': 0.063459} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.941599] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7da0621-e70d-4799-ac2a-b725b447b6fa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.950076] env[69328]: DEBUG oslo_vmware.api [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Waiting for the task: (returnval){ [ 627.950076] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ecd9f6-6c75-2d9d-9ace-0cea1c90fb96" [ 627.950076] env[69328]: _type = "Task" [ 627.950076] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.961639] env[69328]: DEBUG oslo_vmware.api [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ecd9f6-6c75-2d9d-9ace-0cea1c90fb96, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.110028] env[69328]: DEBUG oslo_vmware.api [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': task-3272764, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.625032} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.110334] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 26feb2d1-ff64-4a13-af83-b6d5fe4348e1/26feb2d1-ff64-4a13-af83-b6d5fe4348e1.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 628.110427] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 628.114224] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4b359591-4af8-46a4-b561-8c5bf3b196f5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.122918] env[69328]: DEBUG oslo_vmware.api [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Waiting for the task: (returnval){ [ 628.122918] env[69328]: value = "task-3272768" [ 628.122918] env[69328]: _type = "Task" [ 628.122918] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.137032] env[69328]: DEBUG oslo_vmware.api [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': task-3272768, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.227048] env[69328]: DEBUG nova.network.neutron [req-f3b6442c-9c51-4fb0-a0d7-7c0e7d4d1abb req-8a504a34-51ff-4e4e-b4d9-5658f8e258fb service nova] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Updated VIF entry in instance network info cache for port c9854116-a48f-47e7-8b3e-7a2a04699cc8. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 628.227490] env[69328]: DEBUG nova.network.neutron [req-f3b6442c-9c51-4fb0-a0d7-7c0e7d4d1abb req-8a504a34-51ff-4e4e-b4d9-5658f8e258fb service nova] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Updating instance_info_cache with network_info: [{"id": "c9854116-a48f-47e7-8b3e-7a2a04699cc8", "address": "fa:16:3e:36:39:58", "network": {"id": "476fe2b5-7e7f-4e61-ae35-6ea0ae8e3cb8", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-755380080-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "baf187118a5148ff872d234776a2db47", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bf86b133-2b7b-4cab-8f6f-5a0856d34c7b", "external-id": "nsx-vlan-transportzone-557", "segmentation_id": 557, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9854116-a4", "ovs_interfaceid": "c9854116-a48f-47e7-8b3e-7a2a04699cc8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.291029] env[69328]: DEBUG nova.compute.utils [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 628.292638] env[69328]: DEBUG nova.compute.manager [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 628.292815] env[69328]: DEBUG nova.network.neutron [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 628.388570] env[69328]: DEBUG nova.policy [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1c219ff2dbb749149250a44a781f01c0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a2c1e1002e98420c98a7ddb0392612f0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 628.424554] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272767, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.433876] env[69328]: DEBUG oslo_concurrency.lockutils [req-c29559c4-4c9a-40d3-8bc5-55188906394b req-0efbc9b0-12db-4784-8f95-28836d12ce9c service nova] Releasing lock "refresh_cache-bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 628.434263] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Acquired lock "refresh_cache-bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 628.434423] env[69328]: DEBUG nova.network.neutron [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 628.470312] env[69328]: DEBUG oslo_vmware.api [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ecd9f6-6c75-2d9d-9ace-0cea1c90fb96, 'name': SearchDatastore_Task, 'duration_secs': 0.026787} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.470513] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 628.470777] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] edb1a21a-6907-4198-a977-c1213e8fecc0/edb1a21a-6907-4198-a977-c1213e8fecc0.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 628.472924] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-63956b28-0bb3-472d-b903-8705f71cc3f8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.480826] env[69328]: DEBUG oslo_vmware.api [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Waiting for the task: (returnval){ [ 628.480826] env[69328]: value = "task-3272769" [ 628.480826] env[69328]: _type = "Task" [ 628.480826] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.490262] env[69328]: DEBUG oslo_vmware.api [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Task: {'id': task-3272769, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.638873] env[69328]: DEBUG oslo_vmware.api [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': task-3272768, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.112178} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.639344] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 628.640111] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91289ae9-8cce-417e-9414-c9a2159ce4e3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.672693] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Reconfiguring VM instance instance-0000000c to attach disk [datastore2] 26feb2d1-ff64-4a13-af83-b6d5fe4348e1/26feb2d1-ff64-4a13-af83-b6d5fe4348e1.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 628.675437] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-50bfc90f-51b1-4f01-b52d-ba72d93b1d6f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.697903] env[69328]: DEBUG oslo_vmware.api [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Waiting for the task: (returnval){ [ 628.697903] env[69328]: value = "task-3272770" [ 628.697903] env[69328]: _type = "Task" [ 628.697903] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.713396] env[69328]: DEBUG oslo_vmware.api [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': task-3272770, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.731441] env[69328]: DEBUG oslo_concurrency.lockutils [req-f3b6442c-9c51-4fb0-a0d7-7c0e7d4d1abb req-8a504a34-51ff-4e4e-b4d9-5658f8e258fb service nova] Releasing lock "refresh_cache-edb1a21a-6907-4198-a977-c1213e8fecc0" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 628.731759] env[69328]: DEBUG nova.compute.manager [req-f3b6442c-9c51-4fb0-a0d7-7c0e7d4d1abb req-8a504a34-51ff-4e4e-b4d9-5658f8e258fb service nova] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Received event network-changed-369bd8af-cb0d-49c0-b41e-69689c57cc0a {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 628.731941] env[69328]: DEBUG nova.compute.manager [req-f3b6442c-9c51-4fb0-a0d7-7c0e7d4d1abb req-8a504a34-51ff-4e4e-b4d9-5658f8e258fb service nova] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Refreshing instance network info cache due to event network-changed-369bd8af-cb0d-49c0-b41e-69689c57cc0a. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 628.732195] env[69328]: DEBUG oslo_concurrency.lockutils [req-f3b6442c-9c51-4fb0-a0d7-7c0e7d4d1abb req-8a504a34-51ff-4e4e-b4d9-5658f8e258fb service nova] Acquiring lock "refresh_cache-a798c3f2-ccde-488e-8a14-21f4a04f8e12" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.732371] env[69328]: DEBUG oslo_concurrency.lockutils [req-f3b6442c-9c51-4fb0-a0d7-7c0e7d4d1abb req-8a504a34-51ff-4e4e-b4d9-5658f8e258fb service nova] Acquired lock "refresh_cache-a798c3f2-ccde-488e-8a14-21f4a04f8e12" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 628.732555] env[69328]: DEBUG nova.network.neutron [req-f3b6442c-9c51-4fb0-a0d7-7c0e7d4d1abb req-8a504a34-51ff-4e4e-b4d9-5658f8e258fb service nova] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Refreshing network info cache for port 369bd8af-cb0d-49c0-b41e-69689c57cc0a {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 628.797520] env[69328]: DEBUG nova.compute.manager [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Start building block device mappings for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 628.824089] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad4ccbdb-1241-4424-bda9-85e9031f59e4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.835229] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-611b7403-c103-46d8-a73b-fe656d57c146 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.873659] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32b13478-b460-463d-98ac-4546d19afc6d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.885148] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf65decb-03c3-4f3e-92df-2f01e24100ae {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.903738] env[69328]: DEBUG nova.compute.provider_tree [None req-aec69d4d-dbfb-4864-9814-8fe245f37e57 tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 628.922581] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272767, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.991905] env[69328]: DEBUG oslo_vmware.api [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Task: {'id': task-3272769, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.037913] env[69328]: DEBUG nova.network.neutron [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 629.142719] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Acquiring lock "c3673531-9167-4d33-b8ce-d6afa5e589bc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 629.142719] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Lock "c3673531-9167-4d33-b8ce-d6afa5e589bc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 629.214035] env[69328]: DEBUG oslo_vmware.api [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': task-3272770, 'name': ReconfigVM_Task, 'duration_secs': 0.31883} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.214035] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Reconfigured VM instance instance-0000000c to attach disk [datastore2] 26feb2d1-ff64-4a13-af83-b6d5fe4348e1/26feb2d1-ff64-4a13-af83-b6d5fe4348e1.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 629.216843] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d80de717-220b-47e1-b8b4-c7c6e0cca1f4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.229246] env[69328]: DEBUG oslo_vmware.api [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Waiting for the task: (returnval){ [ 629.229246] env[69328]: value = "task-3272771" [ 629.229246] env[69328]: _type = "Task" [ 629.229246] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.242572] env[69328]: DEBUG oslo_vmware.api [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': task-3272771, 'name': Rename_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.408303] env[69328]: DEBUG nova.scheduler.client.report [None req-aec69d4d-dbfb-4864-9814-8fe245f37e57 tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 629.427247] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272767, 'name': CreateVM_Task, 'duration_secs': 2.007688} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.429210] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 629.429677] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.429828] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 629.430156] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 629.431316] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1792fcde-cb17-47ee-823a-bce2bf613035 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.444274] env[69328]: DEBUG oslo_vmware.api [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Waiting for the task: (returnval){ [ 629.444274] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]521b9b5c-3a5d-c4ae-3aa7-e3ce2ab5fd7e" [ 629.444274] env[69328]: _type = "Task" [ 629.444274] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.455532] env[69328]: DEBUG oslo_vmware.api [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]521b9b5c-3a5d-c4ae-3aa7-e3ce2ab5fd7e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.473177] env[69328]: DEBUG nova.network.neutron [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Successfully created port: 18c21ecf-7293-4ec4-ad46-0b4bcf9c6366 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 629.495799] env[69328]: DEBUG oslo_vmware.api [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Task: {'id': task-3272769, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.561190] env[69328]: DEBUG nova.network.neutron [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Updating instance_info_cache with network_info: [{"id": "e401a888-b320-4f5f-bcdc-5d8c86b99ce7", "address": "fa:16:3e:34:16:b5", "network": {"id": "023046e8-e113-4ce9-95d7-1c04fc034ba6", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-89845670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1467d48a61f7410b8f6d5a981d169563", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape401a888-b3", "ovs_interfaceid": "e401a888-b320-4f5f-bcdc-5d8c86b99ce7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 629.748474] env[69328]: DEBUG oslo_vmware.api [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': task-3272771, 'name': Rename_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.820053] env[69328]: DEBUG nova.compute.manager [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 629.855478] env[69328]: DEBUG nova.virt.hardware [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 629.855711] env[69328]: DEBUG nova.virt.hardware [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 629.855858] env[69328]: DEBUG nova.virt.hardware [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 629.856978] env[69328]: DEBUG nova.virt.hardware [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 629.857232] env[69328]: DEBUG nova.virt.hardware [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 629.857393] env[69328]: DEBUG nova.virt.hardware [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 629.857613] env[69328]: DEBUG nova.virt.hardware [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 629.857772] env[69328]: DEBUG nova.virt.hardware [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 629.858268] env[69328]: DEBUG nova.virt.hardware [None req-e5eb47da-b5a9-413f-9211-182914468c69 
tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 629.858753] env[69328]: DEBUG nova.virt.hardware [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 629.858753] env[69328]: DEBUG nova.virt.hardware [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 629.859586] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de4aa309-0278-47ee-b95a-eb198a2fb8c2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.869776] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bfd83a8-05db-4d5d-83fa-5287e3b49a60 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.914888] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aec69d4d-dbfb-4864-9814-8fe245f37e57 tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.128s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 629.918292] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.105s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 629.921658] env[69328]: INFO nova.compute.claims [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 629.942583] env[69328]: INFO nova.scheduler.client.report [None req-aec69d4d-dbfb-4864-9814-8fe245f37e57 tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Deleted allocations for instance 49a668a7-5967-46a9-823f-7f613d34d152 [ 629.959039] env[69328]: DEBUG oslo_vmware.api [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]521b9b5c-3a5d-c4ae-3aa7-e3ce2ab5fd7e, 'name': SearchDatastore_Task, 'duration_secs': 0.017884} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.959039] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 629.959039] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 629.959288] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.959330] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 629.959506] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 629.959845] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e8250933-2a82-4c2e-bf22-f972a61851ce {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.971121] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 629.971305] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 629.972621] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e430675-25d2-451d-a546-abfe2238e198 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.980805] env[69328]: DEBUG oslo_vmware.api [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Waiting for the task: (returnval){ [ 629.980805] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52eff156-a108-428f-d56e-d925fd881b83" [ 629.980805] env[69328]: _type = "Task" [ 629.980805] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.992937] env[69328]: DEBUG oslo_vmware.api [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52eff156-a108-428f-d56e-d925fd881b83, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.996263] env[69328]: DEBUG oslo_vmware.api [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Task: {'id': task-3272769, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.500423} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.996718] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] edb1a21a-6907-4198-a977-c1213e8fecc0/edb1a21a-6907-4198-a977-c1213e8fecc0.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 629.996718] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 629.996950] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-addbc54e-794f-46fd-af02-55cfd74a3ecd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.007544] env[69328]: DEBUG oslo_vmware.api [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Waiting for the task: (returnval){ [ 630.007544] env[69328]: value = "task-3272772" [ 630.007544] env[69328]: _type = "Task" [ 630.007544] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.019242] env[69328]: DEBUG oslo_vmware.api [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Task: {'id': task-3272772, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.064774] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Releasing lock "refresh_cache-bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 630.065313] env[69328]: DEBUG nova.compute.manager [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Instance network_info: |[{"id": "e401a888-b320-4f5f-bcdc-5d8c86b99ce7", "address": "fa:16:3e:34:16:b5", "network": {"id": "023046e8-e113-4ce9-95d7-1c04fc034ba6", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-89845670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1467d48a61f7410b8f6d5a981d169563", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape401a888-b3", "ovs_interfaceid": "e401a888-b320-4f5f-bcdc-5d8c86b99ce7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 630.065635] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:34:16:b5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f6d1427-d86b-4371-9172-50e4bb0eb1cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e401a888-b320-4f5f-bcdc-5d8c86b99ce7', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 630.076274] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Creating folder: Project (1467d48a61f7410b8f6d5a981d169563). Parent ref: group-v653649. 
{{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 630.076645] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b9a1746c-6c9c-426f-ad9b-71cad805c967 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.088011] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Created folder: Project (1467d48a61f7410b8f6d5a981d169563) in parent group-v653649. [ 630.088245] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Creating folder: Instances. Parent ref: group-v653691. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 630.088646] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6e3b7e4e-c17d-4a08-97a0-5fdeb2ec9082 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.095826] env[69328]: DEBUG nova.network.neutron [req-f3b6442c-9c51-4fb0-a0d7-7c0e7d4d1abb req-8a504a34-51ff-4e4e-b4d9-5658f8e258fb service nova] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Updated VIF entry in instance network info cache for port 369bd8af-cb0d-49c0-b41e-69689c57cc0a. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 630.099248] env[69328]: DEBUG nova.network.neutron [req-f3b6442c-9c51-4fb0-a0d7-7c0e7d4d1abb req-8a504a34-51ff-4e4e-b4d9-5658f8e258fb service nova] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Updating instance_info_cache with network_info: [{"id": "369bd8af-cb0d-49c0-b41e-69689c57cc0a", "address": "fa:16:3e:d2:78:c0", "network": {"id": "f5e5a30f-d08e-46d6-9b1f-18d5e4410095", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-967086728-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4c353c4bd87647548297e8b8553a48e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d377d75-3add-4a15-8691-74b2eb010924", "external-id": "nsx-vlan-transportzone-71", "segmentation_id": 71, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap369bd8af-cb", "ovs_interfaceid": "369bd8af-cb0d-49c0-b41e-69689c57cc0a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.099498] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Created folder: Instances in parent group-v653691. 
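For reference, the Folder.CreateFolder / FileManager.MakeDirectory invocations and the task polling seen in the surrounding entries follow oslo.vmware's session pattern: invoke a vSphere API method through the session and, when the method returns a Task managed object, poll it until completion. Below is a minimal sketch of that pattern using oslo.vmware's public VMwareAPISession; the host, credentials, datastore path and datacenter reference are placeholders, not values taken from this log.

    from oslo_vmware import api

    # Placeholder connection details; in the deployment logged above the
    # vmwareapi driver builds this session itself from nova.conf ([vmware]).
    session = api.VMwareAPISession(
        'vc.example.org', 'administrator@vsphere.local', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Synchronous call, mirroring the FileManager.MakeDirectory entries above.
    file_manager = session.vim.service_content.fileManager
    session.invoke_api(session.vim, 'MakeDirectory', file_manager,
                       name='[datastore2] devstack-image-cache_base',
                       createParentDirectories=True)

    # Task-returning call, mirroring ExtendVirtualDisk_Task; wait_for_task()
    # is what produces the "progress is N%" / "completed successfully" lines.
    disk_manager = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task', disk_manager,
                              name='[datastore2] example/example.vmdk',
                              datacenter=None,  # a Datacenter moref in real use
                              newCapacityKb=1048576, eagerZero=False)
    session.wait_for_task(task)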
[ 630.099735] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 630.099928] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 630.103028] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-020fd514-df02-4a14-a23b-1e6210017548 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.122667] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 630.122667] env[69328]: value = "task-3272775" [ 630.122667] env[69328]: _type = "Task" [ 630.122667] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.133126] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272775, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.246273] env[69328]: DEBUG oslo_vmware.api [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': task-3272771, 'name': Rename_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.454264] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aec69d4d-dbfb-4864-9814-8fe245f37e57 tempest-ServerDiagnosticsTest-1843522277 tempest-ServerDiagnosticsTest-1843522277-project-member] Lock "49a668a7-5967-46a9-823f-7f613d34d152" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.995s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 630.497250] env[69328]: DEBUG oslo_vmware.api [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52eff156-a108-428f-d56e-d925fd881b83, 'name': SearchDatastore_Task, 'duration_secs': 0.020445} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.501026] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64ecd805-fc91-47d1-909a-d1731a8d2c39 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.509278] env[69328]: DEBUG oslo_vmware.api [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Waiting for the task: (returnval){ [ 630.509278] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]520e3a30-34f2-b47d-9bb7-09894f0f5f84" [ 630.509278] env[69328]: _type = "Task" [ 630.509278] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.525563] env[69328]: DEBUG oslo_vmware.api [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Task: {'id': task-3272772, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073944} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.529523] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 630.529870] env[69328]: DEBUG oslo_vmware.api [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]520e3a30-34f2-b47d-9bb7-09894f0f5f84, 'name': SearchDatastore_Task, 'duration_secs': 0.011331} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.530743] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39fbc126-10a7-46b3-a0ad-eb27c7ea10e1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.533406] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 630.533741] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] f428f9a9-d792-4c1c-b2d4-ea066cc09d67/f428f9a9-d792-4c1c-b2d4-ea066cc09d67.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 630.533998] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f054dbaf-f423-46a8-a9e9-d71c4a5880c3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.563321] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Reconfiguring VM instance instance-0000000d to attach disk [datastore2] edb1a21a-6907-4198-a977-c1213e8fecc0/edb1a21a-6907-4198-a977-c1213e8fecc0.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 630.565308] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with 
opID=oslo.vmware-db8dcb67-08d1-4556-949f-b2f912410aaf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.581738] env[69328]: DEBUG oslo_vmware.api [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Waiting for the task: (returnval){ [ 630.581738] env[69328]: value = "task-3272776" [ 630.581738] env[69328]: _type = "Task" [ 630.581738] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.586608] env[69328]: DEBUG oslo_vmware.api [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Waiting for the task: (returnval){ [ 630.586608] env[69328]: value = "task-3272777" [ 630.586608] env[69328]: _type = "Task" [ 630.586608] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.593416] env[69328]: DEBUG oslo_vmware.api [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Task: {'id': task-3272776, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.598841] env[69328]: DEBUG oslo_concurrency.lockutils [req-f3b6442c-9c51-4fb0-a0d7-7c0e7d4d1abb req-8a504a34-51ff-4e4e-b4d9-5658f8e258fb service nova] Releasing lock "refresh_cache-a798c3f2-ccde-488e-8a14-21f4a04f8e12" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 630.599255] env[69328]: DEBUG oslo_vmware.api [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Task: {'id': task-3272777, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.632656] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272775, 'name': CreateVM_Task, 'duration_secs': 0.455203} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.632840] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 630.633546] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.633724] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 630.634138] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 630.634405] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27262a19-0add-458f-a61e-038e258ab46a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.641230] env[69328]: DEBUG oslo_vmware.api [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Waiting for the task: (returnval){ [ 630.641230] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5210cf8e-2975-6587-7c27-37605e3cb23f" [ 630.641230] env[69328]: _type = "Task" [ 630.641230] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.655675] env[69328]: DEBUG oslo_vmware.api [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5210cf8e-2975-6587-7c27-37605e3cb23f, 'name': SearchDatastore_Task, 'duration_secs': 0.014166} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.655965] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 630.656247] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 630.656425] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.656573] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 630.656745] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 630.657017] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c30ab851-4a01-4843-a172-a3f1b656fafd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.665017] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 630.665195] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 630.665942] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9034e3b-16c9-447e-9ff3-881f4debd5c1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.671158] env[69328]: DEBUG oslo_vmware.api [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Waiting for the task: (returnval){ [ 630.671158] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5209830a-037c-bf8f-d9ec-94c36b1abbc1" [ 630.671158] env[69328]: _type = "Task" [ 630.671158] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.678816] env[69328]: DEBUG oslo_vmware.api [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5209830a-037c-bf8f-d9ec-94c36b1abbc1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.744264] env[69328]: DEBUG oslo_vmware.api [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': task-3272771, 'name': Rename_Task, 'duration_secs': 1.16184} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.744630] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 630.744960] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e8180076-ba1a-40b6-b10e-ad76b0806a1b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.751778] env[69328]: DEBUG oslo_vmware.api [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Waiting for the task: (returnval){ [ 630.751778] env[69328]: value = "task-3272778" [ 630.751778] env[69328]: _type = "Task" [ 630.751778] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.760401] env[69328]: DEBUG oslo_vmware.api [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': task-3272778, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.105296] env[69328]: DEBUG oslo_vmware.api [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Task: {'id': task-3272776, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.505598} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.109691] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] f428f9a9-d792-4c1c-b2d4-ea066cc09d67/f428f9a9-d792-4c1c-b2d4-ea066cc09d67.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 631.109942] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 631.110467] env[69328]: DEBUG oslo_vmware.api [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Task: {'id': task-3272777, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.111479] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8250567a-5ba6-43a9-839e-35109b1af2c1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.118238] env[69328]: DEBUG oslo_vmware.api [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Waiting for the task: (returnval){ [ 631.118238] env[69328]: value = "task-3272779" [ 631.118238] env[69328]: _type = "Task" [ 631.118238] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.127661] env[69328]: DEBUG oslo_vmware.api [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Task: {'id': task-3272779, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.189362] env[69328]: DEBUG oslo_vmware.api [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5209830a-037c-bf8f-d9ec-94c36b1abbc1, 'name': SearchDatastore_Task, 'duration_secs': 0.012375} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.193288] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5cb91fd1-1e4a-483d-a383-2b97afe55362 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.199352] env[69328]: DEBUG oslo_vmware.api [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Waiting for the task: (returnval){ [ 631.199352] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5252aef8-f40e-fafe-4f10-5a470a5975dd" [ 631.199352] env[69328]: _type = "Task" [ 631.199352] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.216723] env[69328]: DEBUG oslo_vmware.api [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5252aef8-f40e-fafe-4f10-5a470a5975dd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.271049] env[69328]: DEBUG oslo_vmware.api [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': task-3272778, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.514321] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecad32c9-0b1f-4512-8bb2-5ea6227950c2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.524332] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e1ce3ca-5ad7-4a18-80dc-6626029b83f2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.563020] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad0c2f00-72f4-4dba-bd9e-05bbf9906714 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.569086] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5340208-4bff-4fd1-9976-8a31b0554897 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.585962] env[69328]: DEBUG nova.compute.provider_tree [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 631.605470] env[69328]: DEBUG oslo_vmware.api [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Task: {'id': task-3272777, 'name': ReconfigVM_Task, 'duration_secs': 0.58692} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.606160] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Reconfigured VM instance instance-0000000d to attach disk [datastore2] edb1a21a-6907-4198-a977-c1213e8fecc0/edb1a21a-6907-4198-a977-c1213e8fecc0.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 631.606788] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-155e5a63-7b5c-4a89-92a4-9b45d82a31f0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.613787] env[69328]: DEBUG oslo_vmware.api [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Waiting for the task: (returnval){ [ 631.613787] env[69328]: value = "task-3272780" [ 631.613787] env[69328]: _type = "Task" [ 631.613787] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.624899] env[69328]: DEBUG oslo_vmware.api [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Task: {'id': task-3272780, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.630179] env[69328]: DEBUG oslo_vmware.api [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Task: {'id': task-3272779, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080559} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.630471] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 631.631294] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afdb6fd5-00e0-4ef1-a743-a31585b6b3dc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.655352] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Reconfiguring VM instance instance-0000000f to attach disk [datastore2] f428f9a9-d792-4c1c-b2d4-ea066cc09d67/f428f9a9-d792-4c1c-b2d4-ea066cc09d67.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 631.655352] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a8ed5c66-948c-4a04-ac81-522d3c56a9c1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.674547] env[69328]: DEBUG oslo_vmware.api [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Waiting for the task: (returnval){ [ 631.674547] env[69328]: value = "task-3272781" [ 631.674547] env[69328]: _type = "Task" [ 631.674547] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.684117] env[69328]: DEBUG oslo_vmware.api [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Task: {'id': task-3272781, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.709843] env[69328]: DEBUG oslo_vmware.api [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5252aef8-f40e-fafe-4f10-5a470a5975dd, 'name': SearchDatastore_Task, 'duration_secs': 0.025504} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.710225] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 631.710494] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f/bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 631.710812] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eec8b43c-776d-4304-96d6-58707c6f64ae {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.718266] env[69328]: DEBUG oslo_vmware.api [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Waiting for the task: (returnval){ [ 631.718266] env[69328]: value = "task-3272782" [ 631.718266] env[69328]: _type = "Task" [ 631.718266] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.727024] env[69328]: DEBUG oslo_vmware.api [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': task-3272782, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.765982] env[69328]: DEBUG oslo_vmware.api [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': task-3272778, 'name': PowerOnVM_Task, 'duration_secs': 0.834785} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.766847] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 631.766847] env[69328]: DEBUG nova.compute.manager [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 631.767891] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f5c4278-9bcd-46f8-b621-6896cc1a4cb4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.091260] env[69328]: DEBUG nova.scheduler.client.report [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 632.129319] env[69328]: DEBUG oslo_vmware.api [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Task: {'id': task-3272780, 'name': Rename_Task, 'duration_secs': 0.143182} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.132620] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 632.132620] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6f4a7915-bc6b-4c75-a1ca-22c844c3a803 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.138725] env[69328]: DEBUG oslo_vmware.api [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Waiting for the task: (returnval){ [ 632.138725] env[69328]: value = "task-3272783" [ 632.138725] env[69328]: _type = "Task" [ 632.138725] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.155028] env[69328]: DEBUG oslo_vmware.api [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Task: {'id': task-3272783, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.193273] env[69328]: DEBUG oslo_vmware.api [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Task: {'id': task-3272781, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.233494] env[69328]: DEBUG oslo_vmware.api [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': task-3272782, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.508429} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.233681] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f/bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 632.233897] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 632.234174] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-45a90c9a-344d-472e-adbc-0f8320460120 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.244456] env[69328]: DEBUG oslo_vmware.api [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Waiting for the task: (returnval){ [ 632.244456] env[69328]: value = "task-3272784" [ 632.244456] env[69328]: _type = "Task" [ 632.244456] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.254189] env[69328]: DEBUG oslo_vmware.api [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': task-3272784, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.259083] env[69328]: DEBUG nova.network.neutron [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Successfully updated port: 18c21ecf-7293-4ec4-ad46-0b4bcf9c6366 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 632.285260] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 632.601974] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.685s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 632.602650] env[69328]: DEBUG nova.compute.manager [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 632.606309] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.179s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 632.608157] env[69328]: INFO nova.compute.claims [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 632.659581] env[69328]: DEBUG oslo_vmware.api [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Task: {'id': task-3272783, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.692438] env[69328]: DEBUG oslo_vmware.api [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Task: {'id': task-3272781, 'name': ReconfigVM_Task, 'duration_secs': 0.532784} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.693462] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Reconfigured VM instance instance-0000000f to attach disk [datastore2] f428f9a9-d792-4c1c-b2d4-ea066cc09d67/f428f9a9-d792-4c1c-b2d4-ea066cc09d67.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 632.694875] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f80cac90-146d-4470-8025-ed12548ec73b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.701290] env[69328]: DEBUG oslo_vmware.api [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Waiting for the task: (returnval){ [ 632.701290] env[69328]: value = "task-3272785" [ 632.701290] env[69328]: _type = "Task" [ 632.701290] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.714584] env[69328]: DEBUG oslo_vmware.api [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Task: {'id': task-3272785, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.756639] env[69328]: DEBUG oslo_vmware.api [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': task-3272784, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084763} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.757478] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 632.758909] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-520a2988-7fda-454a-8ccc-08340064ead9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.761871] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Acquiring lock "refresh_cache-7b348a95-3ab2-4112-87e3-b17504c0a302" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.762019] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Acquired lock "refresh_cache-7b348a95-3ab2-4112-87e3-b17504c0a302" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 632.762166] env[69328]: DEBUG nova.network.neutron [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 632.786069] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Reconfiguring VM instance instance-0000000e to attach disk [datastore2] bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f/bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 632.789951] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5de18e39-721f-4d42-b938-1d6aebe562b4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.811141] env[69328]: DEBUG oslo_vmware.api [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Waiting for the task: (returnval){ [ 632.811141] env[69328]: value = "task-3272786" [ 632.811141] env[69328]: _type = "Task" [ 632.811141] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.821596] env[69328]: DEBUG oslo_vmware.api [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': task-3272786, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.870159] env[69328]: DEBUG nova.network.neutron [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 633.004890] env[69328]: DEBUG nova.compute.manager [req-f1f4d495-af2b-4418-9637-b8d567746c28 req-a0172178-4700-4528-977e-635d17513758 service nova] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Received event network-vif-plugged-18c21ecf-7293-4ec4-ad46-0b4bcf9c6366 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 633.005188] env[69328]: DEBUG oslo_concurrency.lockutils [req-f1f4d495-af2b-4418-9637-b8d567746c28 req-a0172178-4700-4528-977e-635d17513758 service nova] Acquiring lock "7b348a95-3ab2-4112-87e3-b17504c0a302-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 633.007148] env[69328]: DEBUG oslo_concurrency.lockutils [req-f1f4d495-af2b-4418-9637-b8d567746c28 req-a0172178-4700-4528-977e-635d17513758 service nova] Lock "7b348a95-3ab2-4112-87e3-b17504c0a302-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 633.007148] env[69328]: DEBUG oslo_concurrency.lockutils [req-f1f4d495-af2b-4418-9637-b8d567746c28 req-a0172178-4700-4528-977e-635d17513758 service nova] Lock "7b348a95-3ab2-4112-87e3-b17504c0a302-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 633.007148] env[69328]: DEBUG nova.compute.manager [req-f1f4d495-af2b-4418-9637-b8d567746c28 req-a0172178-4700-4528-977e-635d17513758 service nova] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] No waiting events found dispatching network-vif-plugged-18c21ecf-7293-4ec4-ad46-0b4bcf9c6366 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 633.007148] env[69328]: WARNING nova.compute.manager [req-f1f4d495-af2b-4418-9637-b8d567746c28 req-a0172178-4700-4528-977e-635d17513758 service nova] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Received unexpected event network-vif-plugged-18c21ecf-7293-4ec4-ad46-0b4bcf9c6366 for instance with vm_state building and task_state spawning. 
[ 633.080729] env[69328]: DEBUG nova.network.neutron [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Updating instance_info_cache with network_info: [{"id": "18c21ecf-7293-4ec4-ad46-0b4bcf9c6366", "address": "fa:16:3e:ed:f9:24", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.30", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18c21ecf-72", "ovs_interfaceid": "18c21ecf-7293-4ec4-ad46-0b4bcf9c6366", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 633.110701] env[69328]: DEBUG nova.compute.utils [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 633.112543] env[69328]: DEBUG nova.compute.manager [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 633.112733] env[69328]: DEBUG nova.network.neutron [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 633.154549] env[69328]: DEBUG oslo_vmware.api [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Task: {'id': task-3272783, 'name': PowerOnVM_Task, 'duration_secs': 0.543303} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.155822] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 633.155822] env[69328]: INFO nova.compute.manager [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Took 11.18 seconds to spawn the instance on the hypervisor. [ 633.155822] env[69328]: DEBUG nova.compute.manager [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 633.156702] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fe81256-408f-4b05-83e6-c0f038ab0a10 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.214411] env[69328]: DEBUG oslo_vmware.api [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Task: {'id': task-3272785, 'name': Rename_Task, 'duration_secs': 0.132137} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.214692] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 633.214946] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f2b9a5b5-855b-47c8-aca1-7412fceb14d5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.224647] env[69328]: DEBUG nova.policy [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc42a62399db4e1faab18af797b5b868', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '082fe2c32b134a4a9600f9f124d5d863', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 633.226763] env[69328]: DEBUG oslo_vmware.api [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Waiting for the task: (returnval){ [ 633.226763] env[69328]: value = "task-3272787" [ 633.226763] env[69328]: _type = "Task" [ 633.226763] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.240537] env[69328]: DEBUG oslo_vmware.api [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Task: {'id': task-3272787, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.338269] env[69328]: DEBUG oslo_vmware.api [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': task-3272786, 'name': ReconfigVM_Task, 'duration_secs': 0.28098} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.338590] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Reconfigured VM instance instance-0000000e to attach disk [datastore2] bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f/bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 633.339892] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bdbcee1f-5328-4c1e-a2a0-1a498275f833 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.347853] env[69328]: DEBUG oslo_vmware.api [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Waiting for the task: (returnval){ [ 633.347853] env[69328]: value = "task-3272788" [ 633.347853] env[69328]: _type = "Task" [ 633.347853] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.359662] env[69328]: DEBUG oslo_vmware.api [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': task-3272788, 'name': Rename_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.383994] env[69328]: DEBUG oslo_concurrency.lockutils [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "b0a1441c-81e2-4131-a2ff-f5042d559d9f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 633.388256] env[69328]: DEBUG oslo_concurrency.lockutils [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "b0a1441c-81e2-4131-a2ff-f5042d559d9f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 633.584471] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Releasing lock "refresh_cache-7b348a95-3ab2-4112-87e3-b17504c0a302" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 633.588175] env[69328]: DEBUG nova.compute.manager [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Instance network_info: |[{"id": "18c21ecf-7293-4ec4-ad46-0b4bcf9c6366", "address": "fa:16:3e:ed:f9:24", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.30", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18c21ecf-72", "ovs_interfaceid": "18c21ecf-7293-4ec4-ad46-0b4bcf9c6366", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 633.589455] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ed:f9:24', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a8b99a46-3e7f-4ef1-9e45-58e6cd17f210', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '18c21ecf-7293-4ec4-ad46-0b4bcf9c6366', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 633.597449] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Creating folder: Project (a2c1e1002e98420c98a7ddb0392612f0). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 633.597785] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fd3c8db8-c3e5-4731-8297-c486b3fffec1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.609680] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Created folder: Project (a2c1e1002e98420c98a7ddb0392612f0) in parent group-v653649. [ 633.609787] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Creating folder: Instances. Parent ref: group-v653694. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 633.613132] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-56aa84da-162d-4a2c-9a96-3b1ed75260e0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.618084] env[69328]: DEBUG nova.compute.manager [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 633.626742] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Created folder: Instances in parent group-v653694. [ 633.626959] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 633.627304] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 633.627522] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9fb453b0-02e0-470c-ba5d-5038824babd1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.657235] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 633.657235] env[69328]: value = "task-3272791" [ 633.657235] env[69328]: _type = "Task" [ 633.657235] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.666156] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272791, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.681177] env[69328]: INFO nova.compute.manager [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Took 27.75 seconds to build instance. [ 633.741024] env[69328]: DEBUG oslo_vmware.api [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Task: {'id': task-3272787, 'name': PowerOnVM_Task, 'duration_secs': 0.486563} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.744798] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 633.744798] env[69328]: INFO nova.compute.manager [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Took 6.51 seconds to spawn the instance on the hypervisor. [ 633.744798] env[69328]: DEBUG nova.compute.manager [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 633.745352] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beb00a4a-bd82-49ff-a21e-28a319d59695 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.821840] env[69328]: INFO nova.compute.manager [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Rebuilding instance [ 633.869179] env[69328]: DEBUG oslo_vmware.api [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': task-3272788, 'name': Rename_Task, 'duration_secs': 0.187988} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.869462] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 633.869703] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9acc5edb-d47e-4dcd-a80d-770f64521807 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.877939] env[69328]: DEBUG oslo_vmware.api [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Waiting for the task: (returnval){ [ 633.877939] env[69328]: value = "task-3272792" [ 633.877939] env[69328]: _type = "Task" [ 633.877939] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.890359] env[69328]: DEBUG nova.compute.manager [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 633.891412] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bdf3602-160a-4bb7-9205-848f773cae26 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.898124] env[69328]: DEBUG oslo_vmware.api [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': task-3272792, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.089445] env[69328]: DEBUG nova.network.neutron [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Successfully created port: ecb0c91b-f122-4c9d-8c9f-480b703a9915 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 634.181382] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272791, 'name': CreateVM_Task, 'duration_secs': 0.474338} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.182801] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2d689e9a-09df-4184-8a69-2bb00cce3b17 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Lock "edb1a21a-6907-4198-a977-c1213e8fecc0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.324s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 634.183235] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 634.183798] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8472fd1-f4c5-405a-b9ea-d287b8fa466e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.188073] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.188147] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 634.188464] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 634.188941] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d7ba334-a422-4e83-979a-c7d663f08ac2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.200852] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a0d9c13-eb63-424e-a2f6-8cf624f4eb36 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.204164] env[69328]: DEBUG oslo_vmware.api [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Waiting for the task: (returnval){ [ 634.204164] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52127930-76fb-fd4f-e121-09311664c940" [ 634.204164] env[69328]: _type = "Task" [ 634.204164] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.239851] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b2ce1f7-6a5f-4465-bbff-b81d0af83e4e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.246207] env[69328]: DEBUG oslo_vmware.api [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52127930-76fb-fd4f-e121-09311664c940, 'name': SearchDatastore_Task, 'duration_secs': 0.010867} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.246940] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 634.247249] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 634.247479] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.247602] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 634.247799] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 634.248377] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-02b9a9a3-b837-45d8-8459-7ea8a0f45074 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.254502] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e72902b1-feab-45e3-aa56-0131fd14f0ae {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.263713] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 
tempest-TenantUsagesTestJSON-972720670-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 634.263833] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 634.267941] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ed59b35-3b4c-4c79-88f9-48edd12022d1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.277907] env[69328]: INFO nova.compute.manager [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Took 27.54 seconds to build instance. [ 634.279318] env[69328]: DEBUG nova.compute.provider_tree [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 634.284887] env[69328]: DEBUG oslo_vmware.api [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Waiting for the task: (returnval){ [ 634.284887] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52dd2d36-b1cb-866b-b2d7-4eada5cd5de6" [ 634.284887] env[69328]: _type = "Task" [ 634.284887] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.293829] env[69328]: DEBUG oslo_vmware.api [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52dd2d36-b1cb-866b-b2d7-4eada5cd5de6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.395795] env[69328]: DEBUG oslo_vmware.api [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': task-3272792, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.640038] env[69328]: DEBUG nova.compute.manager [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 634.683261] env[69328]: DEBUG nova.virt.hardware [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 634.683261] env[69328]: DEBUG nova.virt.hardware [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 634.683261] env[69328]: DEBUG nova.virt.hardware [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 634.683532] env[69328]: DEBUG nova.virt.hardware [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 634.683568] env[69328]: DEBUG nova.virt.hardware [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 634.684996] env[69328]: DEBUG nova.virt.hardware [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 634.684996] env[69328]: DEBUG nova.virt.hardware [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 634.684996] env[69328]: DEBUG nova.virt.hardware [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 634.684996] env[69328]: DEBUG nova.virt.hardware [None 
req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 634.684996] env[69328]: DEBUG nova.virt.hardware [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 634.685371] env[69328]: DEBUG nova.virt.hardware [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 634.686550] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-873fb20c-c1d0-4171-a4ec-b7c362c0a522 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.689351] env[69328]: DEBUG nova.compute.manager [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 634.698445] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2768b123-56d7-4642-8f55-315c86fdd8ee {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.756676] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Acquiring lock "b7409a67-c140-436f-9c4e-27dae259f648" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 634.757031] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Lock "b7409a67-c140-436f-9c4e-27dae259f648" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 634.782400] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1a2343ea-e5fa-49d1-b1d5-acd701e0add6 tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Lock "f428f9a9-d792-4c1c-b2d4-ea066cc09d67" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.327s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 634.784310] env[69328]: DEBUG nova.scheduler.client.report [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Inventory has not 
changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 634.801601] env[69328]: DEBUG oslo_vmware.api [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52dd2d36-b1cb-866b-b2d7-4eada5cd5de6, 'name': SearchDatastore_Task, 'duration_secs': 0.009717} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.802604] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15a1d4ee-c01f-48c0-865f-08952c246e0b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.811168] env[69328]: DEBUG oslo_vmware.api [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Waiting for the task: (returnval){ [ 634.811168] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e90e0c-c647-34dd-74b3-1204c1ef57cd" [ 634.811168] env[69328]: _type = "Task" [ 634.811168] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.818942] env[69328]: DEBUG oslo_vmware.api [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e90e0c-c647-34dd-74b3-1204c1ef57cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.896024] env[69328]: DEBUG oslo_vmware.api [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': task-3272792, 'name': PowerOnVM_Task, 'duration_secs': 0.558952} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.896024] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 634.896024] env[69328]: INFO nova.compute.manager [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Took 10.23 seconds to spawn the instance on the hypervisor. 
[ 634.896024] env[69328]: DEBUG nova.compute.manager [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 634.896024] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-593e971a-bc87-45fd-a728-f27367247efd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.909428] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 634.909428] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4fe37500-2b11-4198-9524-72a250b72bcb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.915662] env[69328]: DEBUG oslo_vmware.api [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Waiting for the task: (returnval){ [ 634.915662] env[69328]: value = "task-3272793" [ 634.915662] env[69328]: _type = "Task" [ 634.915662] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.923368] env[69328]: DEBUG oslo_vmware.api [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Task: {'id': task-3272793, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.230837] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 635.295347] env[69328]: DEBUG nova.compute.manager [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Starting instance... 
{{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 635.300492] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.694s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 635.301043] env[69328]: DEBUG nova.compute.manager [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 635.305507] env[69328]: DEBUG oslo_concurrency.lockutils [None req-423c7c80-1271-4098-b5b6-1229ad381bd9 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.778s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 635.305740] env[69328]: DEBUG nova.objects.instance [None req-423c7c80-1271-4098-b5b6-1229ad381bd9 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lazy-loading 'resources' on Instance uuid 50b84adc-5ff3-4a1e-a09f-5c96daef9b87 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 635.324822] env[69328]: DEBUG oslo_vmware.api [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e90e0c-c647-34dd-74b3-1204c1ef57cd, 'name': SearchDatastore_Task, 'duration_secs': 0.020431} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.325852] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 635.326140] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 7b348a95-3ab2-4112-87e3-b17504c0a302/7b348a95-3ab2-4112-87e3-b17504c0a302.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 635.326411] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b179c74b-5b5a-46b6-8244-a9410356a076 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.334535] env[69328]: DEBUG oslo_vmware.api [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Waiting for the task: (returnval){ [ 635.334535] env[69328]: value = "task-3272794" [ 635.334535] env[69328]: _type = "Task" [ 635.334535] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.352230] env[69328]: DEBUG oslo_vmware.api [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Task: {'id': task-3272794, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.387524] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 635.387795] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 635.411681] env[69328]: INFO nova.compute.manager [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Took 29.38 seconds to build instance. 
[ 635.426841] env[69328]: DEBUG oslo_vmware.api [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Task: {'id': task-3272793, 'name': PowerOffVM_Task, 'duration_secs': 0.119054} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.426841] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 635.426841] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 635.427536] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c88aa86-7074-48a5-bc90-fac5cbd980c8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.437540] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 635.437655] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0ba1f3ed-8180-41f0-863b-108906ecfdf3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.465018] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 635.465222] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 635.465399] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Deleting the datastore file [datastore2] 26feb2d1-ff64-4a13-af83-b6d5fe4348e1 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 635.465671] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-61cd05f7-6de1-4f71-b79c-c0caeef42abb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.474849] env[69328]: DEBUG oslo_vmware.api [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Waiting for the task: 
(returnval){ [ 635.474849] env[69328]: value = "task-3272796" [ 635.474849] env[69328]: _type = "Task" [ 635.474849] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.485010] env[69328]: DEBUG oslo_vmware.api [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Task: {'id': task-3272796, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.707737] env[69328]: DEBUG nova.compute.manager [None req-941eb4d8-27db-4a98-bcd7-338625b3d1dc tempest-ServerDiagnosticsV248Test-1061109847 tempest-ServerDiagnosticsV248Test-1061109847-project-admin] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 635.710435] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f84aa65-176d-47d4-8ed9-ce70fccf8b86 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.720999] env[69328]: INFO nova.compute.manager [None req-941eb4d8-27db-4a98-bcd7-338625b3d1dc tempest-ServerDiagnosticsV248Test-1061109847 tempest-ServerDiagnosticsV248Test-1061109847-project-admin] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Retrieving diagnostics [ 635.721868] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0563e637-42e3-4c37-9d8a-bcda51014efd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.764645] env[69328]: DEBUG nova.compute.manager [req-f0ec7cdd-65aa-4dfe-9a8c-ea58099b1857 req-9a5115c2-52be-41cf-a0b9-d01592339c8f service nova] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Received event network-changed-18c21ecf-7293-4ec4-ad46-0b4bcf9c6366 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 635.764801] env[69328]: DEBUG nova.compute.manager [req-f0ec7cdd-65aa-4dfe-9a8c-ea58099b1857 req-9a5115c2-52be-41cf-a0b9-d01592339c8f service nova] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Refreshing instance network info cache due to event network-changed-18c21ecf-7293-4ec4-ad46-0b4bcf9c6366. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 635.765043] env[69328]: DEBUG oslo_concurrency.lockutils [req-f0ec7cdd-65aa-4dfe-9a8c-ea58099b1857 req-9a5115c2-52be-41cf-a0b9-d01592339c8f service nova] Acquiring lock "refresh_cache-7b348a95-3ab2-4112-87e3-b17504c0a302" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 635.765184] env[69328]: DEBUG oslo_concurrency.lockutils [req-f0ec7cdd-65aa-4dfe-9a8c-ea58099b1857 req-9a5115c2-52be-41cf-a0b9-d01592339c8f service nova] Acquired lock "refresh_cache-7b348a95-3ab2-4112-87e3-b17504c0a302" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 635.765336] env[69328]: DEBUG nova.network.neutron [req-f0ec7cdd-65aa-4dfe-9a8c-ea58099b1857 req-9a5115c2-52be-41cf-a0b9-d01592339c8f service nova] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Refreshing network info cache for port 18c21ecf-7293-4ec4-ad46-0b4bcf9c6366 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 635.807717] env[69328]: DEBUG nova.compute.utils [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 635.814246] env[69328]: DEBUG nova.compute.manager [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 635.814554] env[69328]: DEBUG nova.network.neutron [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 635.834668] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 635.846232] env[69328]: DEBUG oslo_vmware.api [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Task: {'id': task-3272794, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.888018] env[69328]: DEBUG nova.policy [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ccaa0ab0a3cc444ea0da2299d62eec2c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c6895a4954cb4bc89dab40eb3f655606', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 635.913493] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f61cc2dd-54ce-4cce-8b49-a72962195d9d tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Lock "bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.188s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 635.989638] env[69328]: DEBUG oslo_vmware.api [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Task: {'id': task-3272796, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.391531} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.990009] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 635.990278] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 635.990522] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 636.246933] env[69328]: DEBUG nova.network.neutron [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Successfully updated port: ecb0c91b-f122-4c9d-8c9f-480b703a9915 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 636.313831] env[69328]: DEBUG nova.compute.manager [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Start building block device mappings for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 636.343424] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07d381b8-70f2-492a-8857-4dab3a6531f1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.352396] env[69328]: DEBUG oslo_vmware.api [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Task: {'id': task-3272794, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.554556} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.357233] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 7b348a95-3ab2-4112-87e3-b17504c0a302/7b348a95-3ab2-4112-87e3-b17504c0a302.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 636.357233] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 636.357233] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-72aebd78-1bb7-45d3-889b-e212e0a06e56 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.360732] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69508994-8880-4ff1-af7a-455dd2feda6b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.401264] env[69328]: DEBUG oslo_vmware.api [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Waiting for the task: (returnval){ [ 636.401264] env[69328]: value = "task-3272797" [ 636.401264] env[69328]: _type = "Task" [ 636.401264] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.404971] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17d9a4f6-a1ba-44dd-872d-e70400cef6cb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.421787] env[69328]: DEBUG nova.compute.manager [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 636.423080] env[69328]: DEBUG oslo_vmware.api [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Task: {'id': task-3272797, 'name': ExtendVirtualDisk_Task} progress is 50%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.424589] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-186c5eef-6d05-42ff-aa01-d10feca7a697 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.444124] env[69328]: DEBUG nova.compute.provider_tree [None req-423c7c80-1271-4098-b5b6-1229ad381bd9 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 636.444686] env[69328]: DEBUG nova.network.neutron [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Successfully created port: a95af8f2-189b-449d-974d-b380402c6a3f {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 636.727286] env[69328]: DEBUG nova.network.neutron [req-f0ec7cdd-65aa-4dfe-9a8c-ea58099b1857 req-9a5115c2-52be-41cf-a0b9-d01592339c8f service nova] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Updated VIF entry in instance network info cache for port 18c21ecf-7293-4ec4-ad46-0b4bcf9c6366. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 636.727644] env[69328]: DEBUG nova.network.neutron [req-f0ec7cdd-65aa-4dfe-9a8c-ea58099b1857 req-9a5115c2-52be-41cf-a0b9-d01592339c8f service nova] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Updating instance_info_cache with network_info: [{"id": "18c21ecf-7293-4ec4-ad46-0b4bcf9c6366", "address": "fa:16:3e:ed:f9:24", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.30", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18c21ecf-72", "ovs_interfaceid": "18c21ecf-7293-4ec4-ad46-0b4bcf9c6366", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.749140] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Acquiring lock "refresh_cache-6102f8e6-f815-4f5f-921f-990be81fca0d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 636.749140] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 
tempest-ImagesOneServerTestJSON-560227128-project-member] Acquired lock "refresh_cache-6102f8e6-f815-4f5f-921f-990be81fca0d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 636.749534] env[69328]: DEBUG nova.network.neutron [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 636.920550] env[69328]: DEBUG oslo_vmware.api [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Task: {'id': task-3272797, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072135} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.920818] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 636.921692] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f3a7ba8-8d70-47f5-83b5-219a0c4bbd53 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.949833] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Reconfiguring VM instance instance-00000010 to attach disk [datastore2] 7b348a95-3ab2-4112-87e3-b17504c0a302/7b348a95-3ab2-4112-87e3-b17504c0a302.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 636.953008] env[69328]: DEBUG nova.scheduler.client.report [None req-423c7c80-1271-4098-b5b6-1229ad381bd9 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 636.956436] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2e3a93ca-7b3b-42d5-9231-693d7c1c27f6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.973702] env[69328]: DEBUG oslo_concurrency.lockutils [None req-423c7c80-1271-4098-b5b6-1229ad381bd9 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.668s {{(pid=69328) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 636.975955] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.134s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 636.977335] env[69328]: INFO nova.compute.claims [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 636.988839] env[69328]: DEBUG oslo_vmware.api [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Waiting for the task: (returnval){ [ 636.988839] env[69328]: value = "task-3272798" [ 636.988839] env[69328]: _type = "Task" [ 636.988839] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.997853] env[69328]: DEBUG oslo_vmware.api [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Task: {'id': task-3272798, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.999189] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 637.006813] env[69328]: INFO nova.scheduler.client.report [None req-423c7c80-1271-4098-b5b6-1229ad381bd9 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Deleted allocations for instance 50b84adc-5ff3-4a1e-a09f-5c96daef9b87 [ 637.053495] env[69328]: DEBUG nova.virt.hardware [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 637.053855] env[69328]: DEBUG nova.virt.hardware [None req-d9d4b398-de00-47fa-9313-0769afb531b9 
tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 637.053927] env[69328]: DEBUG nova.virt.hardware [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 637.054452] env[69328]: DEBUG nova.virt.hardware [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 637.054452] env[69328]: DEBUG nova.virt.hardware [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 637.054574] env[69328]: DEBUG nova.virt.hardware [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 637.054756] env[69328]: DEBUG nova.virt.hardware [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 637.054855] env[69328]: DEBUG nova.virt.hardware [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 637.055095] env[69328]: DEBUG nova.virt.hardware [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 637.055227] env[69328]: DEBUG nova.virt.hardware [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 637.055442] env[69328]: DEBUG nova.virt.hardware [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 637.058604] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51e162d5-4bf3-46f2-a085-bedc935cf744 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.068580] env[69328]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cbdc487-aea5-4ee7-97cf-cd979089902d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.085157] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Instance VIF info [] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 637.092203] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 637.092871] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 637.093129] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-12ff067b-9644-41f8-9bcc-dfda38657d6d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.115857] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 637.115857] env[69328]: value = "task-3272799" [ 637.115857] env[69328]: _type = "Task" [ 637.115857] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.123617] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272799, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.230861] env[69328]: DEBUG oslo_concurrency.lockutils [req-f0ec7cdd-65aa-4dfe-9a8c-ea58099b1857 req-9a5115c2-52be-41cf-a0b9-d01592339c8f service nova] Releasing lock "refresh_cache-7b348a95-3ab2-4112-87e3-b17504c0a302" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 637.301273] env[69328]: DEBUG nova.network.neutron [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 637.329658] env[69328]: DEBUG nova.compute.manager [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 637.362580] env[69328]: DEBUG nova.virt.hardware [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 637.362819] env[69328]: DEBUG nova.virt.hardware [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 637.362924] env[69328]: DEBUG nova.virt.hardware [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 637.363126] env[69328]: DEBUG nova.virt.hardware [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 637.363291] env[69328]: DEBUG nova.virt.hardware [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 637.363413] env[69328]: DEBUG nova.virt.hardware [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 637.363613] env[69328]: DEBUG nova.virt.hardware [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 637.363772] env[69328]: DEBUG nova.virt.hardware [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 637.363933] env[69328]: DEBUG nova.virt.hardware [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 637.364108] env[69328]: DEBUG nova.virt.hardware [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 637.367966] env[69328]: DEBUG nova.virt.hardware [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 637.369164] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-886cb54d-1d22-44f8-967d-ca9ba2b6dcea {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.383738] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b08d8d2-7209-4cdd-a8d3-9291f17c249d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.499695] env[69328]: DEBUG oslo_vmware.api [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Task: {'id': task-3272798, 'name': ReconfigVM_Task, 'duration_secs': 0.320176} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.499994] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Reconfigured VM instance instance-00000010 to attach disk [datastore2] 7b348a95-3ab2-4112-87e3-b17504c0a302/7b348a95-3ab2-4112-87e3-b17504c0a302.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 637.500612] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3aa3bab9-299f-4aad-8c0e-27d21df9d73a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.507998] env[69328]: DEBUG oslo_vmware.api [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Waiting for the task: (returnval){ [ 637.507998] env[69328]: value = "task-3272800" [ 637.507998] env[69328]: _type = "Task" [ 637.507998] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.522534] env[69328]: DEBUG oslo_vmware.api [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Task: {'id': task-3272800, 'name': Rename_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.523030] env[69328]: DEBUG oslo_concurrency.lockutils [None req-423c7c80-1271-4098-b5b6-1229ad381bd9 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lock "50b84adc-5ff3-4a1e-a09f-5c96daef9b87" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.420s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 637.524572] env[69328]: DEBUG nova.network.neutron [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Updating instance_info_cache with network_info: [{"id": "ecb0c91b-f122-4c9d-8c9f-480b703a9915", "address": "fa:16:3e:87:cb:44", "network": {"id": "dfd21313-e126-438d-8994-8f4426a2c4b2", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-2034545348-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "082fe2c32b134a4a9600f9f124d5d863", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapecb0c91b-f1", "ovs_interfaceid": "ecb0c91b-f122-4c9d-8c9f-480b703a9915", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 637.626633] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272799, 'name': CreateVM_Task, 'duration_secs': 0.350044} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.630026] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 637.630026] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 637.630026] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 637.630026] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 637.630026] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4eaaaf8c-ef46-4d33-ade0-4360aad777ae {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.632886] env[69328]: DEBUG oslo_vmware.api [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Waiting for the task: (returnval){ [ 637.632886] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52118e89-77a3-e605-f7ed-35840185964b" [ 637.632886] env[69328]: _type = "Task" [ 637.632886] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.641384] env[69328]: DEBUG oslo_vmware.api [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52118e89-77a3-e605-f7ed-35840185964b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.673414] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f41c64e8-47aa-4396-9140-5604f5e9a675 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Acquiring lock "edb1a21a-6907-4198-a977-c1213e8fecc0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 637.676969] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f41c64e8-47aa-4396-9140-5604f5e9a675 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Lock "edb1a21a-6907-4198-a977-c1213e8fecc0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 637.676969] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f41c64e8-47aa-4396-9140-5604f5e9a675 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Acquiring lock "edb1a21a-6907-4198-a977-c1213e8fecc0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 637.676969] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f41c64e8-47aa-4396-9140-5604f5e9a675 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Lock "edb1a21a-6907-4198-a977-c1213e8fecc0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 637.676969] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f41c64e8-47aa-4396-9140-5604f5e9a675 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Lock "edb1a21a-6907-4198-a977-c1213e8fecc0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 637.677185] env[69328]: INFO nova.compute.manager [None req-f41c64e8-47aa-4396-9140-5604f5e9a675 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Terminating instance [ 638.023338] env[69328]: DEBUG oslo_vmware.api [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Task: {'id': task-3272800, 'name': Rename_Task, 'duration_secs': 0.139373} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.023338] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 638.023338] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7a3f68d7-92af-4683-a152-27fbb26a2334 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.026753] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Releasing lock "refresh_cache-6102f8e6-f815-4f5f-921f-990be81fca0d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 638.027044] env[69328]: DEBUG nova.compute.manager [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Instance network_info: |[{"id": "ecb0c91b-f122-4c9d-8c9f-480b703a9915", "address": "fa:16:3e:87:cb:44", "network": {"id": "dfd21313-e126-438d-8994-8f4426a2c4b2", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-2034545348-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "082fe2c32b134a4a9600f9f124d5d863", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapecb0c91b-f1", "ovs_interfaceid": "ecb0c91b-f122-4c9d-8c9f-480b703a9915", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 638.027399] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:87:cb:44', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca16b6db-4f74-424c-9d36-925ad82cbdd2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ecb0c91b-f122-4c9d-8c9f-480b703a9915', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 638.036450] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Creating folder: Project 
(082fe2c32b134a4a9600f9f124d5d863). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 638.041257] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aacbc496-752a-40fc-9be1-a1201f7bd276 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.043344] env[69328]: DEBUG oslo_vmware.api [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Waiting for the task: (returnval){ [ 638.043344] env[69328]: value = "task-3272801" [ 638.043344] env[69328]: _type = "Task" [ 638.043344] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.056666] env[69328]: DEBUG oslo_vmware.api [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Task: {'id': task-3272801, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.058259] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Created folder: Project (082fe2c32b134a4a9600f9f124d5d863) in parent group-v653649. [ 638.058670] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Creating folder: Instances. Parent ref: group-v653698. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 638.063071] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-92a473d4-d706-47f9-99f7-821bbb5956dc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.071289] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Created folder: Instances in parent group-v653698. [ 638.071609] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 638.071864] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 638.072448] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-af0942f1-e230-486b-aeed-a4dcf56b927b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.100570] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 638.100570] env[69328]: value = "task-3272804" [ 638.100570] env[69328]: _type = "Task" [ 638.100570] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.110703] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272804, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.145358] env[69328]: DEBUG oslo_vmware.api [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52118e89-77a3-e605-f7ed-35840185964b, 'name': SearchDatastore_Task, 'duration_secs': 0.012513} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.145775] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 638.146085] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 638.146358] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 638.147579] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 638.147579] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 638.147579] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d4af9c88-c33a-4d7d-9045-73d87af881f5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.155978] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 638.156309] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 
tempest-ServersAdmin275Test-1865321521-project-admin] Folder [datastore1] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 638.160505] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d12680a9-9f9e-463a-b3fb-7ac0722bcb47 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.169320] env[69328]: DEBUG oslo_vmware.api [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Waiting for the task: (returnval){ [ 638.169320] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52430e58-7b9e-4c18-6613-8c83210a2be0" [ 638.169320] env[69328]: _type = "Task" [ 638.169320] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.175515] env[69328]: DEBUG oslo_vmware.api [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52430e58-7b9e-4c18-6613-8c83210a2be0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.186672] env[69328]: DEBUG nova.compute.manager [None req-f41c64e8-47aa-4396-9140-5604f5e9a675 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 638.186953] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f41c64e8-47aa-4396-9140-5604f5e9a675 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 638.187914] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca21072c-e743-48b8-b171-902a9d17e4a6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.195903] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f41c64e8-47aa-4396-9140-5604f5e9a675 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 638.196186] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-75b277a9-cca0-48b4-8bce-eddcf03840a9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.203793] env[69328]: DEBUG oslo_vmware.api [None req-f41c64e8-47aa-4396-9140-5604f5e9a675 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Waiting for the task: (returnval){ [ 638.203793] env[69328]: value = "task-3272805" [ 638.203793] env[69328]: _type = "Task" [ 638.203793] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.214075] env[69328]: DEBUG oslo_vmware.api [None req-f41c64e8-47aa-4396-9140-5604f5e9a675 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Task: {'id': task-3272805, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.236235] env[69328]: DEBUG nova.compute.manager [req-a65c6684-77d9-4860-a972-7804b78ee300 req-b4337fa1-86e9-432e-a7d9-2d16863f5e33 service nova] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Received event network-vif-plugged-ecb0c91b-f122-4c9d-8c9f-480b703a9915 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 638.236235] env[69328]: DEBUG oslo_concurrency.lockutils [req-a65c6684-77d9-4860-a972-7804b78ee300 req-b4337fa1-86e9-432e-a7d9-2d16863f5e33 service nova] Acquiring lock "6102f8e6-f815-4f5f-921f-990be81fca0d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 638.236235] env[69328]: DEBUG oslo_concurrency.lockutils [req-a65c6684-77d9-4860-a972-7804b78ee300 req-b4337fa1-86e9-432e-a7d9-2d16863f5e33 service nova] Lock "6102f8e6-f815-4f5f-921f-990be81fca0d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 638.236235] env[69328]: DEBUG oslo_concurrency.lockutils [req-a65c6684-77d9-4860-a972-7804b78ee300 req-b4337fa1-86e9-432e-a7d9-2d16863f5e33 service nova] Lock "6102f8e6-f815-4f5f-921f-990be81fca0d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 638.236235] env[69328]: DEBUG nova.compute.manager [req-a65c6684-77d9-4860-a972-7804b78ee300 req-b4337fa1-86e9-432e-a7d9-2d16863f5e33 service nova] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] No waiting events found dispatching network-vif-plugged-ecb0c91b-f122-4c9d-8c9f-480b703a9915 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 638.236776] env[69328]: WARNING nova.compute.manager [req-a65c6684-77d9-4860-a972-7804b78ee300 req-b4337fa1-86e9-432e-a7d9-2d16863f5e33 service nova] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Received unexpected event network-vif-plugged-ecb0c91b-f122-4c9d-8c9f-480b703a9915 for instance with vm_state building and task_state spawning. [ 638.236776] env[69328]: DEBUG nova.compute.manager [req-a65c6684-77d9-4860-a972-7804b78ee300 req-b4337fa1-86e9-432e-a7d9-2d16863f5e33 service nova] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Received event network-changed-ecb0c91b-f122-4c9d-8c9f-480b703a9915 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 638.236776] env[69328]: DEBUG nova.compute.manager [req-a65c6684-77d9-4860-a972-7804b78ee300 req-b4337fa1-86e9-432e-a7d9-2d16863f5e33 service nova] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Refreshing instance network info cache due to event network-changed-ecb0c91b-f122-4c9d-8c9f-480b703a9915. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 638.236776] env[69328]: DEBUG oslo_concurrency.lockutils [req-a65c6684-77d9-4860-a972-7804b78ee300 req-b4337fa1-86e9-432e-a7d9-2d16863f5e33 service nova] Acquiring lock "refresh_cache-6102f8e6-f815-4f5f-921f-990be81fca0d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 638.236911] env[69328]: DEBUG oslo_concurrency.lockutils [req-a65c6684-77d9-4860-a972-7804b78ee300 req-b4337fa1-86e9-432e-a7d9-2d16863f5e33 service nova] Acquired lock "refresh_cache-6102f8e6-f815-4f5f-921f-990be81fca0d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 638.236999] env[69328]: DEBUG nova.network.neutron [req-a65c6684-77d9-4860-a972-7804b78ee300 req-b4337fa1-86e9-432e-a7d9-2d16863f5e33 service nova] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Refreshing network info cache for port ecb0c91b-f122-4c9d-8c9f-480b703a9915 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 638.374914] env[69328]: DEBUG nova.network.neutron [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Successfully updated port: a95af8f2-189b-449d-974d-b380402c6a3f {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 638.444299] env[69328]: DEBUG oslo_concurrency.lockutils [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Acquiring lock "8e3a73c1-b622-47f4-99af-71b6dba7c09b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 638.444530] env[69328]: DEBUG oslo_concurrency.lockutils [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Lock "8e3a73c1-b622-47f4-99af-71b6dba7c09b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 638.553559] env[69328]: DEBUG oslo_vmware.api [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Task: {'id': task-3272801, 'name': PowerOnVM_Task, 'duration_secs': 0.481798} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.555044] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 638.555044] env[69328]: INFO nova.compute.manager [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Took 8.73 seconds to spawn the instance on the hypervisor. 
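The wait_for_task/_poll_task entries above show the driver handing a vCenter task reference to oslo_vmware and polling it until the task reports success, after which the duration_secs value is logged. The following is a minimal sketch of that polling loop, assuming a hypothetical session.get_task_info() helper and made-up state/progress fields; it only approximates the pattern visible in the log and is not the oslo_vmware implementation.

import time


class TaskFailed(Exception):
    """Raised when the backend reports the task in an error state."""


def wait_for_vcenter_task(session, task_ref, poll_interval=0.5):
    """Poll a vCenter task until it reaches a terminal state.

    Returns the task result on success, raises TaskFailed on error.
    session.get_task_info() is an assumed helper for this sketch, not a
    real oslo_vmware call.
    """
    start = time.monotonic()
    while True:
        info = session.get_task_info(task_ref)  # assumed helper
        if info.state in ("queued", "running"):
            # Corresponds to the "... progress is N%" lines emitted while polling.
            print(f"Task {task_ref}: progress is {info.progress}%")
        elif info.state == "success":
            duration = time.monotonic() - start
            # Corresponds to "... completed successfully" with duration_secs.
            print(f"Task {task_ref} completed successfully "
                  f"(duration_secs: {duration:.6f})")
            return info.result
        else:  # terminal error state
            raise TaskFailed(f"Task {task_ref} failed: {info.error}")
        time.sleep(poll_interval)

In the log above, SearchDatastore_Task, PowerOffVM_Task and CreateVM_Task all go through this same wait/poll cycle before the next step in the spawn or destroy path runs.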
[ 638.555158] env[69328]: DEBUG nova.compute.manager [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 638.555922] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da6f4a70-625b-4573-8ceb-7aff25cbbe4b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.559173] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39cd47ae-b173-48ad-8db0-d9eaa2454cbd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.567354] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-174f4e9d-f090-4395-814a-2804a7885c46 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.608824] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a40cef76-170d-4976-b52f-372d5f62d517 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.618170] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272804, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.621577] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73e33f4c-dd52-4568-b3b1-df5ed445bb74 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.636796] env[69328]: DEBUG nova.compute.provider_tree [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 638.639797] env[69328]: DEBUG nova.compute.manager [req-c52851dd-ac61-483b-ad8c-72828d93aa13 req-52047db9-91a5-4508-a02f-2b74beb4deee service nova] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Received event network-vif-plugged-a95af8f2-189b-449d-974d-b380402c6a3f {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 638.640010] env[69328]: DEBUG oslo_concurrency.lockutils [req-c52851dd-ac61-483b-ad8c-72828d93aa13 req-52047db9-91a5-4508-a02f-2b74beb4deee service nova] Acquiring lock "46526210-2783-408d-9ecb-773f33ff0c66-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 638.640262] env[69328]: DEBUG oslo_concurrency.lockutils [req-c52851dd-ac61-483b-ad8c-72828d93aa13 req-52047db9-91a5-4508-a02f-2b74beb4deee service nova] Lock "46526210-2783-408d-9ecb-773f33ff0c66-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 638.640427] env[69328]: DEBUG oslo_concurrency.lockutils 
[req-c52851dd-ac61-483b-ad8c-72828d93aa13 req-52047db9-91a5-4508-a02f-2b74beb4deee service nova] Lock "46526210-2783-408d-9ecb-773f33ff0c66-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 638.640592] env[69328]: DEBUG nova.compute.manager [req-c52851dd-ac61-483b-ad8c-72828d93aa13 req-52047db9-91a5-4508-a02f-2b74beb4deee service nova] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] No waiting events found dispatching network-vif-plugged-a95af8f2-189b-449d-974d-b380402c6a3f {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 638.640752] env[69328]: WARNING nova.compute.manager [req-c52851dd-ac61-483b-ad8c-72828d93aa13 req-52047db9-91a5-4508-a02f-2b74beb4deee service nova] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Received unexpected event network-vif-plugged-a95af8f2-189b-449d-974d-b380402c6a3f for instance with vm_state building and task_state spawning. [ 638.678734] env[69328]: DEBUG oslo_vmware.api [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52430e58-7b9e-4c18-6613-8c83210a2be0, 'name': SearchDatastore_Task, 'duration_secs': 0.018561} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.679566] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a873c54-df48-4aaa-8d1c-722da0bba078 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.685254] env[69328]: DEBUG oslo_vmware.api [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Waiting for the task: (returnval){ [ 638.685254] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]527a48d9-3df9-c96c-0acc-d590102241ad" [ 638.685254] env[69328]: _type = "Task" [ 638.685254] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.693259] env[69328]: DEBUG oslo_vmware.api [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]527a48d9-3df9-c96c-0acc-d590102241ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.717098] env[69328]: DEBUG oslo_vmware.api [None req-f41c64e8-47aa-4396-9140-5604f5e9a675 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Task: {'id': task-3272805, 'name': PowerOffVM_Task, 'duration_secs': 0.239083} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.717382] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f41c64e8-47aa-4396-9140-5604f5e9a675 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 638.717549] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f41c64e8-47aa-4396-9140-5604f5e9a675 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 638.717791] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bfc620bb-0199-4709-84c4-85a10949f753 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.801253] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f41c64e8-47aa-4396-9140-5604f5e9a675 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 638.803966] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f41c64e8-47aa-4396-9140-5604f5e9a675 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 638.803966] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-f41c64e8-47aa-4396-9140-5604f5e9a675 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Deleting the datastore file [datastore2] edb1a21a-6907-4198-a977-c1213e8fecc0 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 638.803966] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0b321fbc-e142-4e09-a824-03e8ff235512 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.808044] env[69328]: DEBUG oslo_vmware.api [None req-f41c64e8-47aa-4396-9140-5604f5e9a675 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Waiting for the task: (returnval){ [ 638.808044] env[69328]: value = "task-3272807" [ 638.808044] env[69328]: _type = "Task" [ 638.808044] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.816954] env[69328]: DEBUG oslo_vmware.api [None req-f41c64e8-47aa-4396-9140-5604f5e9a675 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Task: {'id': task-3272807, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.881098] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Acquiring lock "refresh_cache-46526210-2783-408d-9ecb-773f33ff0c66" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 638.881255] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Acquired lock "refresh_cache-46526210-2783-408d-9ecb-773f33ff0c66" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 638.881408] env[69328]: DEBUG nova.network.neutron [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 639.008187] env[69328]: DEBUG nova.network.neutron [req-a65c6684-77d9-4860-a972-7804b78ee300 req-b4337fa1-86e9-432e-a7d9-2d16863f5e33 service nova] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Updated VIF entry in instance network info cache for port ecb0c91b-f122-4c9d-8c9f-480b703a9915. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 639.008567] env[69328]: DEBUG nova.network.neutron [req-a65c6684-77d9-4860-a972-7804b78ee300 req-b4337fa1-86e9-432e-a7d9-2d16863f5e33 service nova] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Updating instance_info_cache with network_info: [{"id": "ecb0c91b-f122-4c9d-8c9f-480b703a9915", "address": "fa:16:3e:87:cb:44", "network": {"id": "dfd21313-e126-438d-8994-8f4426a2c4b2", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-2034545348-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "082fe2c32b134a4a9600f9f124d5d863", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapecb0c91b-f1", "ovs_interfaceid": "ecb0c91b-f122-4c9d-8c9f-480b703a9915", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 639.085695] env[69328]: INFO nova.compute.manager [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Took 25.88 seconds to build instance. 
[ 639.119581] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272804, 'name': CreateVM_Task, 'duration_secs': 0.565993} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.120196] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 639.121182] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 639.121361] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 639.121685] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 639.121950] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e177be7-c6c0-4988-bf6f-c10612a9bfdf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.128220] env[69328]: DEBUG oslo_vmware.api [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Waiting for the task: (returnval){ [ 639.128220] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]523d8b66-f2bb-5823-d016-e1316cd45e63" [ 639.128220] env[69328]: _type = "Task" [ 639.128220] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.138310] env[69328]: DEBUG oslo_vmware.api [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]523d8b66-f2bb-5823-d016-e1316cd45e63, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.143432] env[69328]: DEBUG nova.scheduler.client.report [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 639.201493] env[69328]: DEBUG oslo_vmware.api [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]527a48d9-3df9-c96c-0acc-d590102241ad, 'name': SearchDatastore_Task, 'duration_secs': 0.046464} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.202554] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 639.202554] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 26feb2d1-ff64-4a13-af83-b6d5fe4348e1/26feb2d1-ff64-4a13-af83-b6d5fe4348e1.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 639.202554] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-984c282a-0242-4ff7-8376-1a7bfe530441 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.209746] env[69328]: DEBUG oslo_vmware.api [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Waiting for the task: (returnval){ [ 639.209746] env[69328]: value = "task-3272808" [ 639.209746] env[69328]: _type = "Task" [ 639.209746] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.221710] env[69328]: DEBUG oslo_vmware.api [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Task: {'id': task-3272808, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.323815] env[69328]: DEBUG oslo_vmware.api [None req-f41c64e8-47aa-4396-9140-5604f5e9a675 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Task: {'id': task-3272807, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.485679} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.324195] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-f41c64e8-47aa-4396-9140-5604f5e9a675 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 639.324607] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f41c64e8-47aa-4396-9140-5604f5e9a675 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 639.324898] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f41c64e8-47aa-4396-9140-5604f5e9a675 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 639.325161] env[69328]: INFO nova.compute.manager [None req-f41c64e8-47aa-4396-9140-5604f5e9a675 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Took 1.14 seconds to destroy the instance on the hypervisor. [ 639.325692] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f41c64e8-47aa-4396-9140-5604f5e9a675 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 639.326594] env[69328]: DEBUG nova.compute.manager [-] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 639.326594] env[69328]: DEBUG nova.network.neutron [-] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 639.431516] env[69328]: DEBUG nova.network.neutron [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 639.514681] env[69328]: DEBUG oslo_concurrency.lockutils [req-a65c6684-77d9-4860-a972-7804b78ee300 req-b4337fa1-86e9-432e-a7d9-2d16863f5e33 service nova] Releasing lock "refresh_cache-6102f8e6-f815-4f5f-921f-990be81fca0d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 639.589090] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e5eb47da-b5a9-413f-9211-182914468c69 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Lock "7b348a95-3ab2-4112-87e3-b17504c0a302" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.143s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 639.642338] env[69328]: DEBUG oslo_vmware.api [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]523d8b66-f2bb-5823-d016-e1316cd45e63, 'name': SearchDatastore_Task, 'duration_secs': 0.026487} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.644028] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 639.644028] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 639.644028] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 639.644028] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 639.644479] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 639.644479] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d0577a18-2556-456b-9fbc-6609cef606df 
{{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.656021] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 639.656021] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 639.656021] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.680s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 639.656021] env[69328]: DEBUG nova.compute.manager [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 639.660176] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd731834-1b72-49c1-aa6f-7276678b103c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.664379] env[69328]: DEBUG oslo_concurrency.lockutils [None req-72e7da28-9158-4a96-beb1-a3bb043ae1d6 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.229s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 639.664615] env[69328]: DEBUG nova.objects.instance [None req-72e7da28-9158-4a96-beb1-a3bb043ae1d6 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lazy-loading 'resources' on Instance uuid 230c6278-65af-4f5d-b817-0b695086c29d {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 639.673741] env[69328]: DEBUG oslo_vmware.api [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Waiting for the task: (returnval){ [ 639.673741] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f6591b-fe7a-2ae0-da2c-978a70a7f5a3" [ 639.673741] env[69328]: _type = "Task" [ 639.673741] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.682582] env[69328]: DEBUG nova.network.neutron [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Updating instance_info_cache with network_info: [{"id": "a95af8f2-189b-449d-974d-b380402c6a3f", "address": "fa:16:3e:2d:14:69", "network": {"id": "749d8822-e19e-4761-9e9f-f5717a49481a", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-136341088-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6895a4954cb4bc89dab40eb3f655606", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa95af8f2-18", "ovs_interfaceid": "a95af8f2-189b-449d-974d-b380402c6a3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 639.688622] env[69328]: DEBUG oslo_vmware.api [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f6591b-fe7a-2ae0-da2c-978a70a7f5a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.721013] env[69328]: DEBUG oslo_concurrency.lockutils [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Acquiring lock "e1eec0ce-8df7-402a-b628-5dfdc11949e7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 639.725196] env[69328]: DEBUG oslo_concurrency.lockutils [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Lock "e1eec0ce-8df7-402a-b628-5dfdc11949e7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 639.730204] env[69328]: DEBUG oslo_vmware.api [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Task: {'id': task-3272808, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.095590] env[69328]: DEBUG nova.compute.manager [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 640.173817] env[69328]: DEBUG nova.compute.utils [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 640.175360] env[69328]: DEBUG nova.compute.manager [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 640.175571] env[69328]: DEBUG nova.network.neutron [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 640.186491] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Releasing lock "refresh_cache-46526210-2783-408d-9ecb-773f33ff0c66" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 640.186491] env[69328]: DEBUG nova.compute.manager [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Instance network_info: |[{"id": "a95af8f2-189b-449d-974d-b380402c6a3f", "address": "fa:16:3e:2d:14:69", "network": {"id": "749d8822-e19e-4761-9e9f-f5717a49481a", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-136341088-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6895a4954cb4bc89dab40eb3f655606", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa95af8f2-18", "ovs_interfaceid": "a95af8f2-189b-449d-974d-b380402c6a3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 640.187690] env[69328]: DEBUG 
nova.virt.vmwareapi.vmops [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2d:14:69', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c5652322-9f10-4996-baed-4c0aa13a1b4e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a95af8f2-189b-449d-974d-b380402c6a3f', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 640.197577] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Creating folder: Project (c6895a4954cb4bc89dab40eb3f655606). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 640.197913] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-81563ddd-b692-4400-a8f0-4de228fc1ed4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.206063] env[69328]: DEBUG oslo_vmware.api [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f6591b-fe7a-2ae0-da2c-978a70a7f5a3, 'name': SearchDatastore_Task, 'duration_secs': 0.073309} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.207339] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-957d1561-efc0-44df-979e-d193c7924637 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.215661] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Created folder: Project (c6895a4954cb4bc89dab40eb3f655606) in parent group-v653649. [ 640.215835] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Creating folder: Instances. Parent ref: group-v653701. 
{{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 640.223304] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6f27ca58-feaf-44b0-ba7f-cd51a280feb6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.226196] env[69328]: DEBUG nova.network.neutron [-] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 640.227597] env[69328]: DEBUG oslo_vmware.api [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Waiting for the task: (returnval){ [ 640.227597] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]522d378d-84cc-fa93-434d-6eacb734fe40" [ 640.227597] env[69328]: _type = "Task" [ 640.227597] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.239633] env[69328]: DEBUG oslo_vmware.api [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Task: {'id': task-3272808, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.818135} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.241402] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 26feb2d1-ff64-4a13-af83-b6d5fe4348e1/26feb2d1-ff64-4a13-af83-b6d5fe4348e1.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 640.241659] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 640.241908] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Created folder: Instances in parent group-v653701. [ 640.242827] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 640.242827] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-73ad3e02-328c-4816-a935-4f3f942d073a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.244361] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 640.250179] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-63b99601-391c-44e3-8a49-ffec85dd0d7b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.263837] env[69328]: DEBUG oslo_vmware.api [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]522d378d-84cc-fa93-434d-6eacb734fe40, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.270482] env[69328]: DEBUG oslo_vmware.api [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Waiting for the task: (returnval){ [ 640.270482] env[69328]: value = "task-3272811" [ 640.270482] env[69328]: _type = "Task" [ 640.270482] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.271782] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 640.271782] env[69328]: value = "task-3272812" [ 640.271782] env[69328]: _type = "Task" [ 640.271782] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.276050] env[69328]: DEBUG nova.policy [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ca4aa5826fac47c3bdbc3e8a422f1177', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '65edf1e9f4344038878d05021bbdef78', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 640.289746] env[69328]: DEBUG oslo_vmware.api [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Task: {'id': task-3272811, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.292734] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272812, 'name': CreateVM_Task} progress is 6%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.620616] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 640.670376] env[69328]: DEBUG nova.network.neutron [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Successfully created port: efc73e76-7767-42a3-b5a4-3891364b487f {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 640.684888] env[69328]: DEBUG nova.compute.manager [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 640.726331] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce44aa31-7182-4e51-843c-0fd0c6afcec3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.732985] env[69328]: INFO nova.compute.manager [-] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Took 1.41 seconds to deallocate network for instance. [ 640.737152] env[69328]: DEBUG nova.compute.manager [req-517b07d3-0e39-4c42-8cd9-5eab61d6b751 req-fe278f16-6828-4ad9-bfab-72ab5bea1e6a service nova] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Received event network-changed-e401a888-b320-4f5f-bcdc-5d8c86b99ce7 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 640.737152] env[69328]: DEBUG nova.compute.manager [req-517b07d3-0e39-4c42-8cd9-5eab61d6b751 req-fe278f16-6828-4ad9-bfab-72ab5bea1e6a service nova] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Refreshing instance network info cache due to event network-changed-e401a888-b320-4f5f-bcdc-5d8c86b99ce7. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 640.737152] env[69328]: DEBUG oslo_concurrency.lockutils [req-517b07d3-0e39-4c42-8cd9-5eab61d6b751 req-fe278f16-6828-4ad9-bfab-72ab5bea1e6a service nova] Acquiring lock "refresh_cache-bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 640.737152] env[69328]: DEBUG oslo_concurrency.lockutils [req-517b07d3-0e39-4c42-8cd9-5eab61d6b751 req-fe278f16-6828-4ad9-bfab-72ab5bea1e6a service nova] Acquired lock "refresh_cache-bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 640.737152] env[69328]: DEBUG nova.network.neutron [req-517b07d3-0e39-4c42-8cd9-5eab61d6b751 req-fe278f16-6828-4ad9-bfab-72ab5bea1e6a service nova] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Refreshing network info cache for port e401a888-b320-4f5f-bcdc-5d8c86b99ce7 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 640.759991] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b514dba4-84c5-4f2f-ac40-0c095c287f4e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.771274] env[69328]: DEBUG oslo_vmware.api [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]522d378d-84cc-fa93-434d-6eacb734fe40, 'name': SearchDatastore_Task, 'duration_secs': 0.050015} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.774225] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 640.774507] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 6102f8e6-f815-4f5f-921f-990be81fca0d/6102f8e6-f815-4f5f-921f-990be81fca0d.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 640.806339] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-475031d4-b4a2-46c3-80ff-658ad7d23f2b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.821054] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a98d268c-8f79-4a34-8b64-ab1d9ca01a53 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.832547] env[69328]: DEBUG oslo_vmware.api [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] 
Task: {'id': task-3272811, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068908} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.839567] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 640.843440] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272812, 'name': CreateVM_Task} progress is 25%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.843440] env[69328]: DEBUG oslo_vmware.api [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Waiting for the task: (returnval){ [ 640.843440] env[69328]: value = "task-3272813" [ 640.843440] env[69328]: _type = "Task" [ 640.843440] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.843440] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63d58aab-e860-4f90-804a-cfdb4c3d718f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.848442] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b28251d4-b6d6-4c7d-89eb-716400e2b139 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.873751] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Reconfiguring VM instance instance-0000000c to attach disk [datastore1] 26feb2d1-ff64-4a13-af83-b6d5fe4348e1/26feb2d1-ff64-4a13-af83-b6d5fe4348e1.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 640.886297] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8f391478-d925-4aba-bab8-33e2d3c1b529 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.903588] env[69328]: DEBUG oslo_vmware.api [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272813, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.904138] env[69328]: DEBUG nova.compute.provider_tree [None req-72e7da28-9158-4a96-beb1-a3bb043ae1d6 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 640.910589] env[69328]: DEBUG oslo_vmware.api [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Waiting for the task: (returnval){ [ 640.910589] env[69328]: value = "task-3272814" [ 640.910589] env[69328]: _type = "Task" [ 640.910589] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.919089] env[69328]: DEBUG oslo_vmware.api [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Task: {'id': task-3272814, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.257759] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f41c64e8-47aa-4396-9140-5604f5e9a675 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 641.319237] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272812, 'name': CreateVM_Task, 'duration_secs': 0.790355} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.319500] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 641.320541] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.320541] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 641.320712] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 641.320977] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38ece38f-5ec7-414d-8be5-7a7e99763a6f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.326718] env[69328]: DEBUG oslo_concurrency.lockutils [None req-da766223-5256-485d-87d7-8ec2ca958509 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Acquiring lock "7b348a95-3ab2-4112-87e3-b17504c0a302" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 641.326963] env[69328]: DEBUG oslo_concurrency.lockutils [None req-da766223-5256-485d-87d7-8ec2ca958509 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Lock "7b348a95-3ab2-4112-87e3-b17504c0a302" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 641.327222] env[69328]: DEBUG oslo_concurrency.lockutils [None req-da766223-5256-485d-87d7-8ec2ca958509 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Acquiring lock "7b348a95-3ab2-4112-87e3-b17504c0a302-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 641.327394] env[69328]: DEBUG oslo_concurrency.lockutils [None req-da766223-5256-485d-87d7-8ec2ca958509 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Lock "7b348a95-3ab2-4112-87e3-b17504c0a302-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited
0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 641.327749] env[69328]: DEBUG oslo_concurrency.lockutils [None req-da766223-5256-485d-87d7-8ec2ca958509 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Lock "7b348a95-3ab2-4112-87e3-b17504c0a302-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 641.330673] env[69328]: DEBUG oslo_vmware.api [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Waiting for the task: (returnval){ [ 641.330673] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b56cec-7518-16bb-5da0-a69014e67e02" [ 641.330673] env[69328]: _type = "Task" [ 641.330673] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.330673] env[69328]: INFO nova.compute.manager [None req-da766223-5256-485d-87d7-8ec2ca958509 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Terminating instance [ 641.341927] env[69328]: DEBUG oslo_vmware.api [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b56cec-7518-16bb-5da0-a69014e67e02, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.365251] env[69328]: DEBUG oslo_vmware.api [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272813, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.412546] env[69328]: DEBUG nova.scheduler.client.report [None req-72e7da28-9158-4a96-beb1-a3bb043ae1d6 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 641.429518] env[69328]: DEBUG oslo_vmware.api [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Task: {'id': task-3272814, 'name': ReconfigVM_Task} progress is 14%.
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.696047] env[69328]: DEBUG nova.compute.manager [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 641.729764] env[69328]: DEBUG nova.virt.hardware [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 641.730279] env[69328]: DEBUG nova.virt.hardware [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 641.730592] env[69328]: DEBUG nova.virt.hardware [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 641.730976] env[69328]: DEBUG nova.virt.hardware [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 641.731288] env[69328]: DEBUG nova.virt.hardware [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 641.731579] env[69328]: DEBUG nova.virt.hardware [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 641.731967] env[69328]: DEBUG nova.virt.hardware [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 641.732299] env[69328]: DEBUG nova.virt.hardware [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 641.732653] env[69328]: DEBUG nova.virt.hardware [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 641.732976] env[69328]: DEBUG nova.virt.hardware [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 641.733324] env[69328]: DEBUG nova.virt.hardware [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 641.734812] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7cb22af-ac91-48be-80a0-9c94ba405dd8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.740234] env[69328]: DEBUG nova.network.neutron [req-517b07d3-0e39-4c42-8cd9-5eab61d6b751 req-fe278f16-6828-4ad9-bfab-72ab5bea1e6a service nova] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Updated VIF entry in instance network info cache for port e401a888-b320-4f5f-bcdc-5d8c86b99ce7. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 641.740359] env[69328]: DEBUG nova.network.neutron [req-517b07d3-0e39-4c42-8cd9-5eab61d6b751 req-fe278f16-6828-4ad9-bfab-72ab5bea1e6a service nova] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Updating instance_info_cache with network_info: [{"id": "e401a888-b320-4f5f-bcdc-5d8c86b99ce7", "address": "fa:16:3e:34:16:b5", "network": {"id": "023046e8-e113-4ce9-95d7-1c04fc034ba6", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-89845670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.162", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1467d48a61f7410b8f6d5a981d169563", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape401a888-b3", "ovs_interfaceid": "e401a888-b320-4f5f-bcdc-5d8c86b99ce7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 641.747539] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91ff2677-71b6-4388-b0ca-43a823016096 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.841191] env[69328]: DEBUG nova.compute.manager [None req-da766223-5256-485d-87d7-8ec2ca958509 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 641.841401] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-da766223-5256-485d-87d7-8ec2ca958509 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 641.841703] env[69328]: DEBUG oslo_vmware.api [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b56cec-7518-16bb-5da0-a69014e67e02, 'name': SearchDatastore_Task, 'duration_secs': 0.054844} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.842421] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbe895b0-8cd3-4ec9-a38e-1920df106f0c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.845093] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 641.845386] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 641.845614] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.845784] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 641.845985] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 641.846278] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cc5af153-5091-4faa-b073-e3899cf7a125 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.852618] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-da766223-5256-485d-87d7-8ec2ca958509 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 641.852618] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-36dcb551-a41e-4796-8b9e-cdb7a9de93f6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.858839] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 
tempest-AttachInterfacesUnderV243Test-18127939-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 641.859162] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 641.863817] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4747f425-d593-48e1-b30a-9f9464888c95 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.865982] env[69328]: DEBUG oslo_vmware.api [None req-da766223-5256-485d-87d7-8ec2ca958509 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Waiting for the task: (returnval){ [ 641.865982] env[69328]: value = "task-3272815" [ 641.865982] env[69328]: _type = "Task" [ 641.865982] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.866342] env[69328]: DEBUG oslo_vmware.api [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272813, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.610816} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.866602] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 6102f8e6-f815-4f5f-921f-990be81fca0d/6102f8e6-f815-4f5f-921f-990be81fca0d.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 641.866858] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 641.870073] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-32ef713e-482d-4dae-a6db-3379b4d5ccfa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.873403] env[69328]: DEBUG oslo_vmware.api [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Waiting for the task: (returnval){ [ 641.873403] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5270a058-f07c-9e10-4bd6-526b44165eea" [ 641.873403] env[69328]: _type = "Task" [ 641.873403] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.881551] env[69328]: DEBUG oslo_vmware.api [None req-da766223-5256-485d-87d7-8ec2ca958509 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Task: {'id': task-3272815, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.881906] env[69328]: DEBUG oslo_vmware.api [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Waiting for the task: (returnval){ [ 641.881906] env[69328]: value = "task-3272816" [ 641.881906] env[69328]: _type = "Task" [ 641.881906] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.889074] env[69328]: DEBUG oslo_vmware.api [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5270a058-f07c-9e10-4bd6-526b44165eea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.894611] env[69328]: DEBUG oslo_vmware.api [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272816, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.926658] env[69328]: DEBUG oslo_concurrency.lockutils [None req-72e7da28-9158-4a96-beb1-a3bb043ae1d6 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.262s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 641.928577] env[69328]: DEBUG oslo_vmware.api [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Task: {'id': task-3272814, 'name': ReconfigVM_Task, 'duration_secs': 0.807161} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.929077] env[69328]: DEBUG oslo_concurrency.lockutils [None req-70ea211f-5811-4681-9f41-3a22cf226773 tempest-DeleteServersAdminTestJSON-1713601757 tempest-DeleteServersAdminTestJSON-1713601757-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.289s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 641.930046] env[69328]: DEBUG nova.objects.instance [None req-70ea211f-5811-4681-9f41-3a22cf226773 tempest-DeleteServersAdminTestJSON-1713601757 tempest-DeleteServersAdminTestJSON-1713601757-project-admin] Lazy-loading 'resources' on Instance uuid caba3b5c-db15-4de6-8d3d-41f6751f1b83 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 641.932199] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Reconfigured VM instance instance-0000000c to attach disk [datastore1] 26feb2d1-ff64-4a13-af83-b6d5fe4348e1/26feb2d1-ff64-4a13-af83-b6d5fe4348e1.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 641.932199] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9660acb2-413b-4e2f-b796-bc95b94a96bd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.938253] env[69328]: DEBUG oslo_vmware.api [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Waiting for the task: (returnval){ [ 641.938253] env[69328]: value = "task-3272817" [ 641.938253] env[69328]: _type = "Task" [ 641.938253] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.953085] env[69328]: DEBUG oslo_vmware.api [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Task: {'id': task-3272817, 'name': Rename_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.961495] env[69328]: INFO nova.scheduler.client.report [None req-72e7da28-9158-4a96-beb1-a3bb043ae1d6 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Deleted allocations for instance 230c6278-65af-4f5d-b817-0b695086c29d [ 642.244322] env[69328]: DEBUG oslo_concurrency.lockutils [req-517b07d3-0e39-4c42-8cd9-5eab61d6b751 req-fe278f16-6828-4ad9-bfab-72ab5bea1e6a service nova] Releasing lock "refresh_cache-bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 642.244625] env[69328]: DEBUG nova.compute.manager [req-517b07d3-0e39-4c42-8cd9-5eab61d6b751 req-fe278f16-6828-4ad9-bfab-72ab5bea1e6a service nova] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Received event network-changed-a95af8f2-189b-449d-974d-b380402c6a3f {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 642.244812] env[69328]: DEBUG nova.compute.manager [req-517b07d3-0e39-4c42-8cd9-5eab61d6b751 req-fe278f16-6828-4ad9-bfab-72ab5bea1e6a service nova] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Refreshing instance network info cache due to event network-changed-a95af8f2-189b-449d-974d-b380402c6a3f. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 642.245135] env[69328]: DEBUG oslo_concurrency.lockutils [req-517b07d3-0e39-4c42-8cd9-5eab61d6b751 req-fe278f16-6828-4ad9-bfab-72ab5bea1e6a service nova] Acquiring lock "refresh_cache-46526210-2783-408d-9ecb-773f33ff0c66" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.245211] env[69328]: DEBUG oslo_concurrency.lockutils [req-517b07d3-0e39-4c42-8cd9-5eab61d6b751 req-fe278f16-6828-4ad9-bfab-72ab5bea1e6a service nova] Acquired lock "refresh_cache-46526210-2783-408d-9ecb-773f33ff0c66" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 642.245340] env[69328]: DEBUG nova.network.neutron [req-517b07d3-0e39-4c42-8cd9-5eab61d6b751 req-fe278f16-6828-4ad9-bfab-72ab5bea1e6a service nova] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Refreshing network info cache for port a95af8f2-189b-449d-974d-b380402c6a3f {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 642.376885] env[69328]: DEBUG oslo_vmware.api [None req-da766223-5256-485d-87d7-8ec2ca958509 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Task: {'id': task-3272815, 'name': PowerOffVM_Task, 'duration_secs': 0.208658} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.381135] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-da766223-5256-485d-87d7-8ec2ca958509 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 642.381613] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-da766223-5256-485d-87d7-8ec2ca958509 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 642.381958] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-87d9f626-ea6f-4f62-8248-b10abd210090 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.393136] env[69328]: DEBUG oslo_vmware.api [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5270a058-f07c-9e10-4bd6-526b44165eea, 'name': SearchDatastore_Task, 'duration_secs': 0.018671} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.394457] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3929a48-aa13-4390-817a-ff9add7bae0d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.400308] env[69328]: DEBUG oslo_vmware.api [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272816, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067403} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.401241] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 642.402117] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbd64a39-3392-421f-b765-f8830daad43e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.406445] env[69328]: DEBUG oslo_vmware.api [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Waiting for the task: (returnval){ [ 642.406445] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]528358d4-2a80-0f47-9425-721f22446ea8" [ 642.406445] env[69328]: _type = "Task" [ 642.406445] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.429574] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] 6102f8e6-f815-4f5f-921f-990be81fca0d/6102f8e6-f815-4f5f-921f-990be81fca0d.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 642.430507] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3e6e71db-88a4-43e1-b314-e32847e926e6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.451335] env[69328]: DEBUG oslo_vmware.api [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]528358d4-2a80-0f47-9425-721f22446ea8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.456675] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-da766223-5256-485d-87d7-8ec2ca958509 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 642.456990] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-da766223-5256-485d-87d7-8ec2ca958509 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 642.457230] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-da766223-5256-485d-87d7-8ec2ca958509 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Deleting the datastore file [datastore2] 7b348a95-3ab2-4112-87e3-b17504c0a302 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 642.457533] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f43a60cb-17dd-43b7-9cf4-81e74daf5e9b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.461191] env[69328]: DEBUG oslo_vmware.api [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Waiting for the task: (returnval){ [ 642.461191] env[69328]: value = "task-3272819" [ 642.461191] env[69328]: _type = "Task" [ 642.461191] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.464438] env[69328]: DEBUG oslo_vmware.api [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Task: {'id': task-3272817, 'name': Rename_Task, 'duration_secs': 0.20723} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.469609] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 642.469609] env[69328]: DEBUG oslo_vmware.api [None req-da766223-5256-485d-87d7-8ec2ca958509 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Waiting for the task: (returnval){ [ 642.469609] env[69328]: value = "task-3272820" [ 642.469609] env[69328]: _type = "Task" [ 642.469609] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.469609] env[69328]: DEBUG nova.network.neutron [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Successfully updated port: efc73e76-7767-42a3-b5a4-3891364b487f {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 642.472486] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d94b152f-95e4-4127-913a-207d04b90bd3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.474843] env[69328]: DEBUG oslo_concurrency.lockutils [None req-72e7da28-9158-4a96-beb1-a3bb043ae1d6 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lock "230c6278-65af-4f5d-b817-0b695086c29d" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 28.942s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 642.482814] env[69328]: DEBUG oslo_vmware.api [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272819, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.488812] env[69328]: DEBUG oslo_vmware.api [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Waiting for the task: (returnval){ [ 642.488812] env[69328]: value = "task-3272821" [ 642.488812] env[69328]: _type = "Task" [ 642.488812] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.492652] env[69328]: DEBUG oslo_vmware.api [None req-da766223-5256-485d-87d7-8ec2ca958509 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Task: {'id': task-3272820, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.503159] env[69328]: DEBUG oslo_vmware.api [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Task: {'id': task-3272821, 'name': PowerOnVM_Task} progress is 33%.
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.916210] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13962644-1644-4788-ae8e-235c77d66772 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.929115] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43418035-e645-4bab-bb1e-3b9a04544815 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.933570] env[69328]: DEBUG oslo_vmware.api [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]528358d4-2a80-0f47-9425-721f22446ea8, 'name': SearchDatastore_Task, 'duration_secs': 0.040246} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.933570] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 642.933570] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 46526210-2783-408d-9ecb-773f33ff0c66/46526210-2783-408d-9ecb-773f33ff0c66.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 642.933570] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-46b91188-241a-4891-9326-ab3a11de8929 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.961744] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dac73e9-b462-42a8-aec1-ebb5c616bd8f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.966630] env[69328]: DEBUG oslo_vmware.api [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Waiting for the task: (returnval){ [ 642.966630] env[69328]: value = "task-3272822" [ 642.966630] env[69328]: _type = "Task" [ 642.966630] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.978715] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a0336e7-76e0-47a6-b31b-07fdd66ad401 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.987457] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Acquiring lock "refresh_cache-e92953f4-b634-4ef9-a5ad-63a886cfa007" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.987599] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Acquired lock "refresh_cache-e92953f4-b634-4ef9-a5ad-63a886cfa007" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 642.987744] env[69328]: DEBUG nova.network.neutron [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 642.989084] env[69328]: DEBUG oslo_vmware.api [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Task: {'id': task-3272822, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.989422] env[69328]: DEBUG oslo_vmware.api [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272819, 'name': ReconfigVM_Task, 'duration_secs': 0.294346} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.995420] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Reconfigured VM instance instance-00000011 to attach disk [datastore1] 6102f8e6-f815-4f5f-921f-990be81fca0d/6102f8e6-f815-4f5f-921f-990be81fca0d.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 642.996186] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-98b7900b-903c-41d5-afd5-7aa714194d5a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.013496] env[69328]: DEBUG nova.compute.provider_tree [None req-70ea211f-5811-4681-9f41-3a22cf226773 tempest-DeleteServersAdminTestJSON-1713601757 tempest-DeleteServersAdminTestJSON-1713601757-project-admin] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 643.018037] env[69328]: DEBUG oslo_vmware.api [None req-da766223-5256-485d-87d7-8ec2ca958509 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Task: {'id': task-3272820, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.550247} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.019659] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-da766223-5256-485d-87d7-8ec2ca958509 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 643.019659] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-da766223-5256-485d-87d7-8ec2ca958509 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 643.019659] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-da766223-5256-485d-87d7-8ec2ca958509 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 643.019659] env[69328]: INFO nova.compute.manager [None req-da766223-5256-485d-87d7-8ec2ca958509 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Took 1.18 seconds to destroy the instance on the hypervisor. [ 643.019659] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-da766223-5256-485d-87d7-8ec2ca958509 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 643.020200] env[69328]: DEBUG nova.compute.manager [-] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 643.020798] env[69328]: DEBUG nova.network.neutron [-] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 643.026658] env[69328]: DEBUG oslo_vmware.api [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Task: {'id': task-3272821, 'name': PowerOnVM_Task} progress is 78%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.026942] env[69328]: DEBUG oslo_vmware.api [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Waiting for the task: (returnval){ [ 643.026942] env[69328]: value = "task-3272823" [ 643.026942] env[69328]: _type = "Task" [ 643.026942] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.036847] env[69328]: DEBUG oslo_vmware.api [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272823, 'name': Rename_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.045550] env[69328]: DEBUG nova.network.neutron [req-517b07d3-0e39-4c42-8cd9-5eab61d6b751 req-fe278f16-6828-4ad9-bfab-72ab5bea1e6a service nova] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Updated VIF entry in instance network info cache for port a95af8f2-189b-449d-974d-b380402c6a3f. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 643.045906] env[69328]: DEBUG nova.network.neutron [req-517b07d3-0e39-4c42-8cd9-5eab61d6b751 req-fe278f16-6828-4ad9-bfab-72ab5bea1e6a service nova] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Updating instance_info_cache with network_info: [{"id": "a95af8f2-189b-449d-974d-b380402c6a3f", "address": "fa:16:3e:2d:14:69", "network": {"id": "749d8822-e19e-4761-9e9f-f5717a49481a", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-136341088-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6895a4954cb4bc89dab40eb3f655606", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa95af8f2-18", "ovs_interfaceid": "a95af8f2-189b-449d-974d-b380402c6a3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.058017] env[69328]: DEBUG nova.compute.manager [req-d12973ee-4056-4b9e-8d7e-59b9c74c1e87 req-8ea5b7e4-2875-4029-82e1-3042b667955b service nova] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Received event network-vif-plugged-efc73e76-7767-42a3-b5a4-3891364b487f {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 643.058017] env[69328]: DEBUG oslo_concurrency.lockutils [req-d12973ee-4056-4b9e-8d7e-59b9c74c1e87 req-8ea5b7e4-2875-4029-82e1-3042b667955b service nova] Acquiring lock "e92953f4-b634-4ef9-a5ad-63a886cfa007-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 643.058017] env[69328]: DEBUG oslo_concurrency.lockutils [req-d12973ee-4056-4b9e-8d7e-59b9c74c1e87 req-8ea5b7e4-2875-4029-82e1-3042b667955b service nova] Lock "e92953f4-b634-4ef9-a5ad-63a886cfa007-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 643.058017] env[69328]: DEBUG oslo_concurrency.lockutils [req-d12973ee-4056-4b9e-8d7e-59b9c74c1e87 req-8ea5b7e4-2875-4029-82e1-3042b667955b service nova] Lock "e92953f4-b634-4ef9-a5ad-63a886cfa007-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 643.058017] env[69328]: DEBUG nova.compute.manager [req-d12973ee-4056-4b9e-8d7e-59b9c74c1e87 req-8ea5b7e4-2875-4029-82e1-3042b667955b service nova] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] No waiting events found dispatching network-vif-plugged-efc73e76-7767-42a3-b5a4-3891364b487f {{(pid=69328) pop_instance_event
/opt/stack/nova/nova/compute/manager.py:322}} [ 643.058381] env[69328]: WARNING nova.compute.manager [req-d12973ee-4056-4b9e-8d7e-59b9c74c1e87 req-8ea5b7e4-2875-4029-82e1-3042b667955b service nova] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Received unexpected event network-vif-plugged-efc73e76-7767-42a3-b5a4-3891364b487f for instance with vm_state building and task_state spawning. [ 643.058381] env[69328]: DEBUG nova.compute.manager [req-d12973ee-4056-4b9e-8d7e-59b9c74c1e87 req-8ea5b7e4-2875-4029-82e1-3042b667955b service nova] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Received event network-changed-efc73e76-7767-42a3-b5a4-3891364b487f {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 643.058381] env[69328]: DEBUG nova.compute.manager [req-d12973ee-4056-4b9e-8d7e-59b9c74c1e87 req-8ea5b7e4-2875-4029-82e1-3042b667955b service nova] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Refreshing instance network info cache due to event network-changed-efc73e76-7767-42a3-b5a4-3891364b487f. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 643.058381] env[69328]: DEBUG oslo_concurrency.lockutils [req-d12973ee-4056-4b9e-8d7e-59b9c74c1e87 req-8ea5b7e4-2875-4029-82e1-3042b667955b service nova] Acquiring lock "refresh_cache-e92953f4-b634-4ef9-a5ad-63a886cfa007" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 643.478469] env[69328]: DEBUG oslo_vmware.api [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Task: {'id': task-3272822, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.507897] env[69328]: DEBUG oslo_vmware.api [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Task: {'id': task-3272821, 'name': PowerOnVM_Task, 'duration_secs': 0.735654} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.508200] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 643.508719] env[69328]: DEBUG nova.compute.manager [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 643.509517] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2eb6496-9aa3-4112-853d-3a80797b3bc1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.519992] env[69328]: DEBUG nova.scheduler.client.report [None req-70ea211f-5811-4681-9f41-3a22cf226773 tempest-DeleteServersAdminTestJSON-1713601757 tempest-DeleteServersAdminTestJSON-1713601757-project-admin] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 643.537861] env[69328]: DEBUG oslo_vmware.api [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272823, 'name': Rename_Task, 'duration_secs': 0.154955} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.539719] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 643.539719] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c1642e68-f6aa-40b2-adc5-1a36ab5c2a4d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.545315] env[69328]: DEBUG oslo_vmware.api [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Waiting for the task: (returnval){ [ 643.545315] env[69328]: value = "task-3272824" [ 643.545315] env[69328]: _type = "Task" [ 643.545315] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.552034] env[69328]: DEBUG oslo_concurrency.lockutils [req-517b07d3-0e39-4c42-8cd9-5eab61d6b751 req-fe278f16-6828-4ad9-bfab-72ab5bea1e6a service nova] Releasing lock "refresh_cache-46526210-2783-408d-9ecb-773f33ff0c66" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 643.552034] env[69328]: DEBUG nova.compute.manager [req-517b07d3-0e39-4c42-8cd9-5eab61d6b751 req-fe278f16-6828-4ad9-bfab-72ab5bea1e6a service nova] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Received event network-vif-deleted-c9854116-a48f-47e7-8b3e-7a2a04699cc8 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 643.552034] env[69328]: DEBUG nova.network.neutron [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 643.558595] env[69328]: DEBUG oslo_vmware.api [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272824, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.753610] env[69328]: DEBUG nova.network.neutron [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Updating instance_info_cache with network_info: [{"id": "efc73e76-7767-42a3-b5a4-3891364b487f", "address": "fa:16:3e:0d:8d:73", "network": {"id": "2060ab72-61b3-4aea-bcdd-0b8a76a11fc7", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1995944610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65edf1e9f4344038878d05021bbdef78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefc73e76-77", "ovs_interfaceid": "efc73e76-7767-42a3-b5a4-3891364b487f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.980419] env[69328]: DEBUG oslo_vmware.api [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Task: {'id': task-3272822, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.525558} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.980703] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 46526210-2783-408d-9ecb-773f33ff0c66/46526210-2783-408d-9ecb-773f33ff0c66.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 643.980872] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 643.981169] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b749bb26-660c-4862-9882-64e1b0716622 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.989670] env[69328]: DEBUG oslo_vmware.api [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Waiting for the task: (returnval){ [ 643.989670] env[69328]: value = "task-3272825" [ 643.989670] env[69328]: _type = "Task" [ 643.989670] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.001900] env[69328]: DEBUG oslo_vmware.api [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Task: {'id': task-3272825, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.034035] env[69328]: DEBUG oslo_concurrency.lockutils [None req-70ea211f-5811-4681-9f41-3a22cf226773 tempest-DeleteServersAdminTestJSON-1713601757 tempest-DeleteServersAdminTestJSON-1713601757-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.104s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 644.046194] env[69328]: DEBUG oslo_concurrency.lockutils [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 25.889s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 644.053318] env[69328]: DEBUG nova.compute.manager [req-ee115f14-7e03-4854-af74-62313e96f40b req-168a360b-c594-49f2-b0d0-9d77e80232a3 service nova] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Received event network-vif-deleted-18c21ecf-7293-4ec4-ad46-0b4bcf9c6366 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 644.053507] env[69328]: INFO nova.compute.manager [req-ee115f14-7e03-4854-af74-62313e96f40b req-168a360b-c594-49f2-b0d0-9d77e80232a3 service nova] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Neutron deleted interface 18c21ecf-7293-4ec4-ad46-0b4bcf9c6366; detaching it from the instance and deleting it from the info cache [ 644.053672] env[69328]: DEBUG nova.network.neutron [req-ee115f14-7e03-4854-af74-62313e96f40b req-168a360b-c594-49f2-b0d0-9d77e80232a3 service nova] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 644.055240] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 644.075141] env[69328]: DEBUG oslo_vmware.api [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272824, 'name': PowerOnVM_Task, 'duration_secs': 0.505525} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.076208] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 644.076258] env[69328]: INFO nova.compute.manager [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Took 9.44 seconds to spawn the instance on the hypervisor. 
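Note on the lock tracing in the entries above: lines of the form "Acquiring lock ... by ...", "Lock ... acquired by ... :: waited 0.000s" and "Lock ... \"released\" by ... :: held 2.104s" are emitted from inside oslo.concurrency's lockutils wrappers, as the trailers themselves indicate (the synchronized() decorator's inner wrapper at lockutils.py:405/410/424, and the lock() context manager at lockutils.py:313/316/334). A minimal, self-contained sketch of the same decorator pattern follows; the lock name and function here are illustrative placeholders, not code taken from Nova:

    from oslo_concurrency import lockutils

    # lockutils.synchronized() wraps the function in an 'inner' closure that logs
    # "Acquiring lock ...", "Lock ... acquired ... :: waited Ns" and
    # 'Lock ... "released" ... :: held Ns' around the call -- the same tracing
    # visible throughout this log.
    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid):
        # Runs while the named semaphore is held; wait/held durations are logged.
        return instance_uuid

    if __name__ == '__main__':
        claim_resources('00000000-0000-0000-0000-000000000000')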
[ 644.076972] env[69328]: DEBUG nova.compute.manager [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 644.080318] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45671006-76f3-427b-b162-30c261fd00cd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.082042] env[69328]: INFO nova.scheduler.client.report [None req-70ea211f-5811-4681-9f41-3a22cf226773 tempest-DeleteServersAdminTestJSON-1713601757 tempest-DeleteServersAdminTestJSON-1713601757-project-admin] Deleted allocations for instance caba3b5c-db15-4de6-8d3d-41f6751f1b83 [ 644.257125] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Releasing lock "refresh_cache-e92953f4-b634-4ef9-a5ad-63a886cfa007" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 644.258414] env[69328]: DEBUG nova.compute.manager [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Instance network_info: |[{"id": "efc73e76-7767-42a3-b5a4-3891364b487f", "address": "fa:16:3e:0d:8d:73", "network": {"id": "2060ab72-61b3-4aea-bcdd-0b8a76a11fc7", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1995944610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65edf1e9f4344038878d05021bbdef78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefc73e76-77", "ovs_interfaceid": "efc73e76-7767-42a3-b5a4-3891364b487f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 644.259549] env[69328]: DEBUG oslo_concurrency.lockutils [req-d12973ee-4056-4b9e-8d7e-59b9c74c1e87 req-8ea5b7e4-2875-4029-82e1-3042b667955b service nova] Acquired lock "refresh_cache-e92953f4-b634-4ef9-a5ad-63a886cfa007" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 644.259768] env[69328]: DEBUG nova.network.neutron [req-d12973ee-4056-4b9e-8d7e-59b9c74c1e87 req-8ea5b7e4-2875-4029-82e1-3042b667955b service nova] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Refreshing network info cache for port efc73e76-7767-42a3-b5a4-3891364b487f {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 
644.261345] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0d:8d:73', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '41278529-8bd2-44a1-97c8-03967faa3ff7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'efc73e76-7767-42a3-b5a4-3891364b487f', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 644.276753] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Creating folder: Project (65edf1e9f4344038878d05021bbdef78). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 644.280363] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-68273efa-6c7e-4d08-8a39-6306b2064e1e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.293702] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Created folder: Project (65edf1e9f4344038878d05021bbdef78) in parent group-v653649. [ 644.293907] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Creating folder: Instances. Parent ref: group-v653704. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 644.294170] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d44f189c-e65d-4136-8a9c-c3f273a0b76f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.301232] env[69328]: DEBUG nova.network.neutron [-] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 644.305061] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Created folder: Instances in parent group-v653704. [ 644.305253] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 644.305507] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 644.305754] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c504dc9c-1d44-43d8-8d1a-ca312205a114 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.327670] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 644.327670] env[69328]: value = "task-3272828" [ 644.327670] env[69328]: _type = "Task" [ 644.327670] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.343213] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272828, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.501515] env[69328]: DEBUG oslo_vmware.api [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Task: {'id': task-3272825, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076671} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.501989] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 644.502913] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d777135-27ed-45da-8d2b-70d1aa20d56c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.529741] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Reconfiguring VM instance instance-00000012 to attach disk [datastore1] 46526210-2783-408d-9ecb-773f33ff0c66/46526210-2783-408d-9ecb-773f33ff0c66.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 644.532202] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c42213fa-9ef8-4260-9437-4d525ad23f39 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.562870] env[69328]: INFO nova.compute.claims [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 644.570814] env[69328]: DEBUG oslo_vmware.api [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Waiting for the task: (returnval){ [ 
644.570814] env[69328]: value = "task-3272829" [ 644.570814] env[69328]: _type = "Task" [ 644.570814] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.571395] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d2b7a800-7dbd-40c2-bd8e-c9c371bf33d9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.593122] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-054cfb6f-2087-47fd-9e32-237bf7fcecde {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.606916] env[69328]: DEBUG oslo_vmware.api [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Task: {'id': task-3272829, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.608042] env[69328]: DEBUG oslo_concurrency.lockutils [None req-70ea211f-5811-4681-9f41-3a22cf226773 tempest-DeleteServersAdminTestJSON-1713601757 tempest-DeleteServersAdminTestJSON-1713601757-project-admin] Lock "caba3b5c-db15-4de6-8d3d-41f6751f1b83" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.490s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 644.620954] env[69328]: INFO nova.compute.manager [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Took 29.84 seconds to build instance. [ 644.638688] env[69328]: DEBUG nova.compute.manager [req-ee115f14-7e03-4854-af74-62313e96f40b req-168a360b-c594-49f2-b0d0-9d77e80232a3 service nova] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Detach interface failed, port_id=18c21ecf-7293-4ec4-ad46-0b4bcf9c6366, reason: Instance 7b348a95-3ab2-4112-87e3-b17504c0a302 could not be found. 
{{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 644.743498] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquiring lock "1e7e9e6e-c084-480c-8653-8441c13d7514" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 644.743837] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lock "1e7e9e6e-c084-480c-8653-8441c13d7514" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 644.772708] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquiring lock "146a3eef-0971-4f6e-bd24-58b38a1de0ed" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 644.772968] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lock "146a3eef-0971-4f6e-bd24-58b38a1de0ed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 644.805102] env[69328]: INFO nova.compute.manager [-] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Took 1.78 seconds to deallocate network for instance. [ 644.841188] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272828, 'name': CreateVM_Task, 'duration_secs': 0.3356} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.841376] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 644.842101] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.842262] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 644.842622] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 644.845241] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ffd0998-26be-41a6-973d-800033b74356 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.853103] env[69328]: DEBUG oslo_vmware.api [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Waiting for the task: (returnval){ [ 644.853103] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f4afdf-0430-8777-cc06-b6e361d95c26" [ 644.853103] env[69328]: _type = "Task" [ 644.853103] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.862097] env[69328]: DEBUG oslo_vmware.api [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f4afdf-0430-8777-cc06-b6e361d95c26, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.881344] env[69328]: DEBUG nova.network.neutron [req-d12973ee-4056-4b9e-8d7e-59b9c74c1e87 req-8ea5b7e4-2875-4029-82e1-3042b667955b service nova] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Updated VIF entry in instance network info cache for port efc73e76-7767-42a3-b5a4-3891364b487f. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 644.882336] env[69328]: DEBUG nova.network.neutron [req-d12973ee-4056-4b9e-8d7e-59b9c74c1e87 req-8ea5b7e4-2875-4029-82e1-3042b667955b service nova] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Updating instance_info_cache with network_info: [{"id": "efc73e76-7767-42a3-b5a4-3891364b487f", "address": "fa:16:3e:0d:8d:73", "network": {"id": "2060ab72-61b3-4aea-bcdd-0b8a76a11fc7", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1995944610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65edf1e9f4344038878d05021bbdef78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefc73e76-77", "ovs_interfaceid": "efc73e76-7767-42a3-b5a4-3891364b487f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 645.071885] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2943fff6-bb40-405e-a549-f2cad6459000 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Acquiring lock "26feb2d1-ff64-4a13-af83-b6d5fe4348e1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 645.072348] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2943fff6-bb40-405e-a549-f2cad6459000 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Lock "26feb2d1-ff64-4a13-af83-b6d5fe4348e1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 645.072541] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2943fff6-bb40-405e-a549-f2cad6459000 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Acquiring lock "26feb2d1-ff64-4a13-af83-b6d5fe4348e1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 645.072681] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2943fff6-bb40-405e-a549-f2cad6459000 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Lock "26feb2d1-ff64-4a13-af83-b6d5fe4348e1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 645.072911] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2943fff6-bb40-405e-a549-f2cad6459000 
tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Lock "26feb2d1-ff64-4a13-af83-b6d5fe4348e1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 645.076049] env[69328]: INFO nova.compute.resource_tracker [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Updating resource usage from migration 3db95d5c-678c-43e6-b21d-735678c13948 [ 645.082026] env[69328]: INFO nova.compute.manager [None req-2943fff6-bb40-405e-a549-f2cad6459000 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Terminating instance [ 645.091952] env[69328]: DEBUG oslo_vmware.api [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Task: {'id': task-3272829, 'name': ReconfigVM_Task, 'duration_secs': 0.320097} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.092245] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Reconfigured VM instance instance-00000012 to attach disk [datastore1] 46526210-2783-408d-9ecb-773f33ff0c66/46526210-2783-408d-9ecb-773f33ff0c66.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 645.092868] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-00ab349b-88ca-4c7e-a302-1b62e622175c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.101956] env[69328]: DEBUG oslo_vmware.api [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Waiting for the task: (returnval){ [ 645.101956] env[69328]: value = "task-3272830" [ 645.101956] env[69328]: _type = "Task" [ 645.101956] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.113024] env[69328]: DEBUG oslo_vmware.api [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Task: {'id': task-3272830, 'name': Rename_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.123132] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b0e54aa3-6a12-473b-89fd-5980f670855e tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Lock "6102f8e6-f815-4f5f-921f-990be81fca0d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.554s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 645.313105] env[69328]: DEBUG oslo_concurrency.lockutils [None req-da766223-5256-485d-87d7-8ec2ca958509 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 645.369436] env[69328]: DEBUG oslo_vmware.api [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f4afdf-0430-8777-cc06-b6e361d95c26, 'name': SearchDatastore_Task, 'duration_secs': 0.025987} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.369792] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 645.370039] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 645.370305] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.370453] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 645.370650] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 645.370909] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-71a1b958-c45b-4f27-b9a2-3d17e4f80fca {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.387547] env[69328]: DEBUG oslo_concurrency.lockutils [req-d12973ee-4056-4b9e-8d7e-59b9c74c1e87 req-8ea5b7e4-2875-4029-82e1-3042b667955b service nova] Releasing lock "refresh_cache-e92953f4-b634-4ef9-a5ad-63a886cfa007" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 645.387882] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 645.388080] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 645.391233] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b6dd938-0d5e-48b6-8e7b-667dda11c870 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.396812] env[69328]: DEBUG oslo_vmware.api [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Waiting for the task: (returnval){ [ 645.396812] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5252902d-8235-9760-cc35-eaf3965b1b96" [ 645.396812] env[69328]: _type = "Task" [ 645.396812] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.406447] env[69328]: DEBUG oslo_vmware.api [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5252902d-8235-9760-cc35-eaf3965b1b96, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.585615] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32a4facb-cbc7-4f69-924a-1e219fa2a241 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.588782] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2943fff6-bb40-405e-a549-f2cad6459000 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Acquiring lock "refresh_cache-26feb2d1-ff64-4a13-af83-b6d5fe4348e1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.589220] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2943fff6-bb40-405e-a549-f2cad6459000 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Acquired lock "refresh_cache-26feb2d1-ff64-4a13-af83-b6d5fe4348e1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 645.589220] env[69328]: DEBUG nova.network.neutron [None req-2943fff6-bb40-405e-a549-f2cad6459000 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 645.595426] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52872a9b-82ba-4047-9614-77ecf6987943 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.632747] env[69328]: DEBUG nova.compute.manager [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 645.639787] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7b33f0c-6ff7-412d-8cc4-83e2947fe09f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.648486] env[69328]: DEBUG oslo_vmware.api [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Task: {'id': task-3272830, 'name': Rename_Task, 'duration_secs': 0.152497} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.650829] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 645.651367] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ecc5d210-7768-4201-9866-79a6be1d37ef {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.655122] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b2785ea-b367-420d-9178-6f81aa7d0606 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.675043] env[69328]: DEBUG nova.compute.provider_tree [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 645.678208] env[69328]: DEBUG oslo_vmware.api [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Waiting for the task: (returnval){ [ 645.678208] env[69328]: value = "task-3272831" [ 645.678208] env[69328]: _type = "Task" [ 645.678208] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.686751] env[69328]: DEBUG oslo_vmware.api [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Task: {'id': task-3272831, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.785775] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Acquiring lock "4c54c0dd-32f1-4d35-b770-3e1a540c54a7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 645.786388] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Lock "4c54c0dd-32f1-4d35-b770-3e1a540c54a7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 645.911963] env[69328]: DEBUG oslo_vmware.api [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5252902d-8235-9760-cc35-eaf3965b1b96, 'name': SearchDatastore_Task, 'duration_secs': 0.051585} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.913189] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39e30c1b-e425-45b2-8fbb-0bf110f7a842 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.919771] env[69328]: DEBUG oslo_vmware.api [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Waiting for the task: (returnval){ [ 645.919771] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5241f1a2-6507-0ce4-be31-91beb22fac7f" [ 645.919771] env[69328]: _type = "Task" [ 645.919771] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.929484] env[69328]: DEBUG oslo_vmware.api [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5241f1a2-6507-0ce4-be31-91beb22fac7f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.120987] env[69328]: DEBUG nova.network.neutron [None req-2943fff6-bb40-405e-a549-f2cad6459000 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 646.177286] env[69328]: DEBUG oslo_concurrency.lockutils [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 646.181735] env[69328]: DEBUG nova.scheduler.client.report [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 646.200470] env[69328]: DEBUG oslo_vmware.api [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Task: {'id': task-3272831, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.220690] env[69328]: DEBUG nova.network.neutron [None req-2943fff6-bb40-405e-a549-f2cad6459000 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 646.434097] env[69328]: DEBUG oslo_vmware.api [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5241f1a2-6507-0ce4-be31-91beb22fac7f, 'name': SearchDatastore_Task, 'duration_secs': 0.012492} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.434650] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 646.435047] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] e92953f4-b634-4ef9-a5ad-63a886cfa007/e92953f4-b634-4ef9-a5ad-63a886cfa007.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 646.435777] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f4729d6d-1d42-4f29-846b-0e7939d09251 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.443828] env[69328]: DEBUG oslo_vmware.api [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Waiting for the task: (returnval){ [ 646.443828] env[69328]: value = "task-3272832" [ 646.443828] env[69328]: _type = "Task" [ 646.443828] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.451872] env[69328]: DEBUG oslo_vmware.api [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': task-3272832, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.507848] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Acquiring lock "e5d3df12-5334-44c8-9a44-1674e57918bb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 646.508583] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Lock "e5d3df12-5334-44c8-9a44-1674e57918bb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 646.648025] env[69328]: DEBUG nova.compute.manager [None req-b5958dfb-5c25-49ce-9cd7-3bb83428eee3 tempest-ServerDiagnosticsV248Test-1061109847 tempest-ServerDiagnosticsV248Test-1061109847-project-admin] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 646.648691] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-187cdde5-6bfb-4077-a07c-5ace17038b4f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.658473] env[69328]: INFO nova.compute.manager [None req-b5958dfb-5c25-49ce-9cd7-3bb83428eee3 tempest-ServerDiagnosticsV248Test-1061109847 tempest-ServerDiagnosticsV248Test-1061109847-project-admin] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Retrieving diagnostics [ 646.658888] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11c2ba45-ad45-4aec-a25a-d7e561d8bf3a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.697670] env[69328]: DEBUG oslo_concurrency.lockutils [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.653s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 646.698270] env[69328]: INFO nova.compute.manager [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Migrating [ 646.698681] env[69328]: DEBUG oslo_concurrency.lockutils [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 646.699154] env[69328]: DEBUG oslo_concurrency.lockutils [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquired lock "compute-rpcapi-router" {{(pid=69328) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 646.701470] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.035s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 646.702467] env[69328]: INFO nova.compute.claims [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 646.715230] env[69328]: DEBUG oslo_vmware.api [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Task: {'id': task-3272831, 'name': PowerOnVM_Task, 'duration_secs': 0.541643} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.715230] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 646.715230] env[69328]: INFO nova.compute.manager [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Took 9.38 seconds to spawn the instance on the hypervisor. [ 646.715230] env[69328]: DEBUG nova.compute.manager [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 646.715230] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e58b3f73-c88e-47c3-bedc-e217845c2cf1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.728235] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2943fff6-bb40-405e-a549-f2cad6459000 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Releasing lock "refresh_cache-26feb2d1-ff64-4a13-af83-b6d5fe4348e1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 646.728235] env[69328]: DEBUG nova.compute.manager [None req-2943fff6-bb40-405e-a549-f2cad6459000 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 646.728235] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2943fff6-bb40-405e-a549-f2cad6459000 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 646.728530] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e341f562-199d-4d2f-bcf4-157c183fd347 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.736302] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2943fff6-bb40-405e-a549-f2cad6459000 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 646.736768] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-24c9d1e8-7062-4df3-8c08-d430c7a334e9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.743154] env[69328]: DEBUG oslo_vmware.api [None req-2943fff6-bb40-405e-a549-f2cad6459000 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Waiting for the task: (returnval){ [ 646.743154] env[69328]: value = "task-3272833" [ 646.743154] env[69328]: _type = "Task" [ 646.743154] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.754023] env[69328]: DEBUG oslo_vmware.api [None req-2943fff6-bb40-405e-a549-f2cad6459000 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': task-3272833, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.953405] env[69328]: DEBUG oslo_vmware.api [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': task-3272832, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.207116] env[69328]: INFO nova.compute.rpcapi [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Automatically selected compute RPC version 6.4 from minimum service version 68 [ 647.208277] env[69328]: DEBUG oslo_concurrency.lockutils [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Releasing lock "compute-rpcapi-router" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 647.241377] env[69328]: INFO nova.compute.manager [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Took 31.84 seconds to build instance. 
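The CopyVirtualDisk_Task entries above (task-3272832, invoked at 646.435 and then polled at 0% and 25%) follow oslo.vmware's invoke-then-poll pattern: the driver calls a vCenter method whose name ends in _Task, then blocks in wait_for_task(), which re-reads the task state on the session's configured poll interval and emits one "progress is N%" DEBUG line per poll (the api.py:434/444 locations in these entries are that poll loop) until the task completes or faults. Below is a minimal sketch of that pattern, assuming an already-authenticated oslo_vmware.api.VMwareAPISession named session and a pre-resolved datacenter managed-object reference; the helper name copy_and_wait and the paths in the trailing comment are illustrative, not Nova's actual vm_util code.

    def copy_and_wait(session, source_path, dest_path, dc_moref):
        """Start CopyVirtualDisk_Task and block until vCenter finishes it.

        session is an authenticated oslo_vmware.api.VMwareAPISession;
        dc_moref is the datacenter that owns both datastore paths.
        """
        vim = session.vim
        # Ask vCenter's VirtualDiskManager to start the copy; this returns
        # a task reference immediately, it does not wait for the copy.
        task = session.invoke_api(
            vim, 'CopyVirtualDisk_Task',
            vim.service_content.virtualDiskManager,
            sourceName=source_path, sourceDatacenter=dc_moref,
            destName=dest_path, destDatacenter=dc_moref)
        # wait_for_task() polls the task on the session's poll interval,
        # logging "progress is N%" per poll, and raises an
        # oslo_vmware.exceptions.VimException subclass if the task fails.
        return session.wait_for_task(task)

    # Shape of the call behind the entries above (paths abbreviated):
    # copy_and_wait(session,
    #               '[datastore1] devstack-image-cache_base/<image>.vmdk',
    #               '[datastore1] <instance-uuid>/<instance-uuid>.vmdk',
    #               dc_moref)

The Acquiring/Releasing lock entries around the copy (the devstack-image-cache_base/....vmdk lock) come from oslo.concurrency's lockutils and serialize access to the shared image-cache VMDK; the lock is released at 646.434 just before the per-instance copy is started.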
[ 647.254457] env[69328]: DEBUG oslo_vmware.api [None req-2943fff6-bb40-405e-a549-f2cad6459000 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': task-3272833, 'name': PowerOffVM_Task, 'duration_secs': 0.374959} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.254715] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2943fff6-bb40-405e-a549-f2cad6459000 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 647.254878] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2943fff6-bb40-405e-a549-f2cad6459000 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 647.255215] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-17e1641b-49b1-4adb-aa99-7fd52e36ab5e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.279830] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2943fff6-bb40-405e-a549-f2cad6459000 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 647.280167] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2943fff6-bb40-405e-a549-f2cad6459000 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 647.280263] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-2943fff6-bb40-405e-a549-f2cad6459000 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Deleting the datastore file [datastore1] 26feb2d1-ff64-4a13-af83-b6d5fe4348e1 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 647.280517] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0760c9c9-b1f4-4bde-b0bc-32bdb73fc370 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.287259] env[69328]: DEBUG oslo_vmware.api [None req-2943fff6-bb40-405e-a549-f2cad6459000 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Waiting for the task: (returnval){ [ 647.287259] env[69328]: value = "task-3272835" [ 647.287259] env[69328]: _type = "Task" [ 647.287259] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.295213] env[69328]: DEBUG oslo_vmware.api [None req-2943fff6-bb40-405e-a549-f2cad6459000 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': task-3272835, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.453152] env[69328]: DEBUG oslo_vmware.api [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': task-3272832, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.678913} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.453422] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] e92953f4-b634-4ef9-a5ad-63a886cfa007/e92953f4-b634-4ef9-a5ad-63a886cfa007.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 647.453635] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 647.453874] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ba0d741e-b646-409b-a94e-343b3a403bc4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.460518] env[69328]: DEBUG oslo_vmware.api [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Waiting for the task: (returnval){ [ 647.460518] env[69328]: value = "task-3272836" [ 647.460518] env[69328]: _type = "Task" [ 647.460518] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.467707] env[69328]: DEBUG oslo_vmware.api [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': task-3272836, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.542375] env[69328]: DEBUG nova.compute.manager [None req-29c55b94-c329-4ae5-8d18-236afae3d2b5 tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 647.543285] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0637172-8280-42ed-b40a-5ad3809d0de6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.728080] env[69328]: DEBUG oslo_concurrency.lockutils [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquiring lock "refresh_cache-d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.728259] env[69328]: DEBUG oslo_concurrency.lockutils [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquired lock "refresh_cache-d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 647.728431] env[69328]: DEBUG nova.network.neutron [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 647.730059] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Acquiring lock "732342ea-2f73-40ea-a826-883ddc7a385a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 647.730319] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Lock "732342ea-2f73-40ea-a826-883ddc7a385a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 647.742113] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aecf6b6a-c071-487f-9170-68bd65563a08 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Lock "46526210-2783-408d-9ecb-773f33ff0c66" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.427s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 647.799426] env[69328]: DEBUG oslo_vmware.api [None req-2943fff6-bb40-405e-a549-f2cad6459000 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Task: {'id': task-3272835, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.104963} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.799681] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-2943fff6-bb40-405e-a549-f2cad6459000 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 647.799872] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2943fff6-bb40-405e-a549-f2cad6459000 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 647.800069] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2943fff6-bb40-405e-a549-f2cad6459000 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 647.800280] env[69328]: INFO nova.compute.manager [None req-2943fff6-bb40-405e-a549-f2cad6459000 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Took 1.07 seconds to destroy the instance on the hypervisor. [ 647.800458] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2943fff6-bb40-405e-a549-f2cad6459000 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 647.802973] env[69328]: DEBUG nova.compute.manager [-] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 647.802973] env[69328]: DEBUG nova.network.neutron [-] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 647.820267] env[69328]: DEBUG nova.network.neutron [-] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 647.973535] env[69328]: DEBUG oslo_vmware.api [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': task-3272836, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.234331} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.973821] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 647.974717] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87a64493-95c0-40d5-8d21-7b545b1f962a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.000807] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Reconfiguring VM instance instance-00000013 to attach disk [datastore1] e92953f4-b634-4ef9-a5ad-63a886cfa007/e92953f4-b634-4ef9-a5ad-63a886cfa007.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 648.003542] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd9d8a04-8129-4e89-90a0-375cab09aae1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.023675] env[69328]: DEBUG oslo_vmware.api [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Waiting for the task: (returnval){ [ 648.023675] env[69328]: value = "task-3272837" [ 648.023675] env[69328]: _type = "Task" [ 648.023675] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.033746] env[69328]: DEBUG oslo_vmware.api [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': task-3272837, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.040042] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6ad47de2-9a72-45ee-be43-3ebb4501e24d tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Acquiring lock "f428f9a9-d792-4c1c-b2d4-ea066cc09d67" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 648.040307] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6ad47de2-9a72-45ee-be43-3ebb4501e24d tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Lock "f428f9a9-d792-4c1c-b2d4-ea066cc09d67" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 648.040593] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6ad47de2-9a72-45ee-be43-3ebb4501e24d tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Acquiring lock "f428f9a9-d792-4c1c-b2d4-ea066cc09d67-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 648.040786] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6ad47de2-9a72-45ee-be43-3ebb4501e24d tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Lock "f428f9a9-d792-4c1c-b2d4-ea066cc09d67-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 648.040952] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6ad47de2-9a72-45ee-be43-3ebb4501e24d tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Lock "f428f9a9-d792-4c1c-b2d4-ea066cc09d67-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 648.043089] env[69328]: INFO nova.compute.manager [None req-6ad47de2-9a72-45ee-be43-3ebb4501e24d tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Terminating instance [ 648.055897] env[69328]: INFO nova.compute.manager [None req-29c55b94-c329-4ae5-8d18-236afae3d2b5 tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] instance snapshotting [ 648.058519] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f116110-eb6c-4a36-a86c-34344b5d8ba0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.081602] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14a43aca-5557-46ad-916f-a40151bb2ffa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.183021] 
env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4e82b3e-93ba-41fa-9777-27ebab00365b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.189139] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7cc493a-ad95-43e6-a247-bf4fe3b77614 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.225019] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fa1c09f-e99b-4f87-917c-5cb2cda4f7f6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.235029] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9f88e6a-9ec2-4102-9698-0f5a8913f2ce {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.249840] env[69328]: DEBUG nova.compute.manager [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 648.254200] env[69328]: DEBUG nova.compute.provider_tree [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 648.323902] env[69328]: DEBUG nova.network.neutron [-] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.534786] env[69328]: DEBUG oslo_vmware.api [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': task-3272837, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.546826] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6ad47de2-9a72-45ee-be43-3ebb4501e24d tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Acquiring lock "refresh_cache-f428f9a9-d792-4c1c-b2d4-ea066cc09d67" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 648.547104] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6ad47de2-9a72-45ee-be43-3ebb4501e24d tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Acquired lock "refresh_cache-f428f9a9-d792-4c1c-b2d4-ea066cc09d67" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 648.547359] env[69328]: DEBUG nova.network.neutron [None req-6ad47de2-9a72-45ee-be43-3ebb4501e24d tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 648.593091] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-29c55b94-c329-4ae5-8d18-236afae3d2b5 tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Creating Snapshot of the VM instance {{(pid=69328) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 648.593479] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-4bd68678-f02d-4f92-bb21-d1add3996b7a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.603880] env[69328]: DEBUG oslo_vmware.api [None req-29c55b94-c329-4ae5-8d18-236afae3d2b5 tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Waiting for the task: (returnval){ [ 648.603880] env[69328]: value = "task-3272838" [ 648.603880] env[69328]: _type = "Task" [ 648.603880] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.615209] env[69328]: DEBUG oslo_vmware.api [None req-29c55b94-c329-4ae5-8d18-236afae3d2b5 tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272838, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.621070] env[69328]: DEBUG nova.network.neutron [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Updating instance_info_cache with network_info: [{"id": "cbf38f9d-1507-45bb-9684-bf804c86b93b", "address": "fa:16:3e:a1:39:be", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.236", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbf38f9d-15", "ovs_interfaceid": "cbf38f9d-1507-45bb-9684-bf804c86b93b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.730045] env[69328]: DEBUG nova.compute.manager [req-7fb2336a-7fa9-4ed2-a8eb-78f149fef5c5 req-497d69b2-17de-48f7-b459-fcb3a462d7e2 service nova] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Received event network-changed-a95af8f2-189b-449d-974d-b380402c6a3f {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 648.730304] env[69328]: DEBUG nova.compute.manager [req-7fb2336a-7fa9-4ed2-a8eb-78f149fef5c5 req-497d69b2-17de-48f7-b459-fcb3a462d7e2 service nova] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Refreshing instance network info cache due to event network-changed-a95af8f2-189b-449d-974d-b380402c6a3f. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 648.730519] env[69328]: DEBUG oslo_concurrency.lockutils [req-7fb2336a-7fa9-4ed2-a8eb-78f149fef5c5 req-497d69b2-17de-48f7-b459-fcb3a462d7e2 service nova] Acquiring lock "refresh_cache-46526210-2783-408d-9ecb-773f33ff0c66" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 648.730676] env[69328]: DEBUG oslo_concurrency.lockutils [req-7fb2336a-7fa9-4ed2-a8eb-78f149fef5c5 req-497d69b2-17de-48f7-b459-fcb3a462d7e2 service nova] Acquired lock "refresh_cache-46526210-2783-408d-9ecb-773f33ff0c66" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 648.731812] env[69328]: DEBUG nova.network.neutron [req-7fb2336a-7fa9-4ed2-a8eb-78f149fef5c5 req-497d69b2-17de-48f7-b459-fcb3a462d7e2 service nova] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Refreshing network info cache for port a95af8f2-189b-449d-974d-b380402c6a3f {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 648.759435] env[69328]: DEBUG nova.scheduler.client.report [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 648.782505] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 648.825522] env[69328]: INFO nova.compute.manager [-] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Took 1.02 seconds to deallocate network for instance. [ 649.034352] env[69328]: DEBUG oslo_vmware.api [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': task-3272837, 'name': ReconfigVM_Task, 'duration_secs': 0.53771} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.034639] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Reconfigured VM instance instance-00000013 to attach disk [datastore1] e92953f4-b634-4ef9-a5ad-63a886cfa007/e92953f4-b634-4ef9-a5ad-63a886cfa007.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 649.035365] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-967243b2-469f-4d6e-94e8-ebba89acf46a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.041019] env[69328]: DEBUG oslo_vmware.api [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Waiting for the task: (returnval){ [ 649.041019] env[69328]: value = "task-3272839" [ 649.041019] env[69328]: _type = "Task" [ 649.041019] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.048798] env[69328]: DEBUG oslo_vmware.api [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': task-3272839, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.078079] env[69328]: DEBUG nova.network.neutron [None req-6ad47de2-9a72-45ee-be43-3ebb4501e24d tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 649.113970] env[69328]: DEBUG oslo_vmware.api [None req-29c55b94-c329-4ae5-8d18-236afae3d2b5 tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272838, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.124788] env[69328]: DEBUG oslo_concurrency.lockutils [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Releasing lock "refresh_cache-d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 649.139468] env[69328]: DEBUG nova.network.neutron [None req-6ad47de2-9a72-45ee-be43-3ebb4501e24d tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 649.267214] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.566s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 649.267913] env[69328]: DEBUG nova.compute.manager [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 649.270883] env[69328]: DEBUG oslo_concurrency.lockutils [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.484s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 649.272360] env[69328]: INFO nova.compute.claims [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 649.333084] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2943fff6-bb40-405e-a549-f2cad6459000 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 649.494563] env[69328]: DEBUG nova.network.neutron [req-7fb2336a-7fa9-4ed2-a8eb-78f149fef5c5 req-497d69b2-17de-48f7-b459-fcb3a462d7e2 service nova] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Updated VIF entry in instance network info cache for port a95af8f2-189b-449d-974d-b380402c6a3f. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 649.494932] env[69328]: DEBUG nova.network.neutron [req-7fb2336a-7fa9-4ed2-a8eb-78f149fef5c5 req-497d69b2-17de-48f7-b459-fcb3a462d7e2 service nova] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Updating instance_info_cache with network_info: [{"id": "a95af8f2-189b-449d-974d-b380402c6a3f", "address": "fa:16:3e:2d:14:69", "network": {"id": "749d8822-e19e-4761-9e9f-f5717a49481a", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-136341088-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.135", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6895a4954cb4bc89dab40eb3f655606", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa95af8f2-18", "ovs_interfaceid": "a95af8f2-189b-449d-974d-b380402c6a3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 649.551619] env[69328]: DEBUG oslo_vmware.api [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': task-3272839, 'name': Rename_Task, 'duration_secs': 0.162376} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.551881] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 649.552134] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0d798634-1c2b-454a-bda2-970ae437af57 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.558080] env[69328]: DEBUG oslo_vmware.api [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Waiting for the task: (returnval){ [ 649.558080] env[69328]: value = "task-3272840" [ 649.558080] env[69328]: _type = "Task" [ 649.558080] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.565305] env[69328]: DEBUG oslo_vmware.api [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': task-3272840, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.613912] env[69328]: DEBUG oslo_vmware.api [None req-29c55b94-c329-4ae5-8d18-236afae3d2b5 tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272838, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.641603] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6ad47de2-9a72-45ee-be43-3ebb4501e24d tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Releasing lock "refresh_cache-f428f9a9-d792-4c1c-b2d4-ea066cc09d67" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 649.642045] env[69328]: DEBUG nova.compute.manager [None req-6ad47de2-9a72-45ee-be43-3ebb4501e24d tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 649.642259] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6ad47de2-9a72-45ee-be43-3ebb4501e24d tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 649.643194] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff0ebd5a-4c73-4725-895d-194eaf413bff {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.650861] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ad47de2-9a72-45ee-be43-3ebb4501e24d tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 649.651148] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1ca52147-8790-45c4-8184-b8dfe6a020cf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.656467] env[69328]: DEBUG oslo_vmware.api [None req-6ad47de2-9a72-45ee-be43-3ebb4501e24d tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Waiting for the task: (returnval){ [ 649.656467] env[69328]: value = "task-3272841" [ 649.656467] env[69328]: _type = "Task" [ 649.656467] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.663938] env[69328]: DEBUG oslo_vmware.api [None req-6ad47de2-9a72-45ee-be43-3ebb4501e24d tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Task: {'id': task-3272841, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.776517] env[69328]: DEBUG nova.compute.utils [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 649.780480] env[69328]: DEBUG nova.compute.manager [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 649.780688] env[69328]: DEBUG nova.network.neutron [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 649.866317] env[69328]: DEBUG nova.policy [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'baaa0ef287264c0f8d2ff4dc7ad5b69c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2e75682b9ac94ac39fc889c033d8e9b9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 649.997484] env[69328]: DEBUG oslo_concurrency.lockutils [req-7fb2336a-7fa9-4ed2-a8eb-78f149fef5c5 req-497d69b2-17de-48f7-b459-fcb3a462d7e2 service nova] Releasing lock "refresh_cache-46526210-2783-408d-9ecb-773f33ff0c66" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 650.067404] env[69328]: DEBUG oslo_vmware.api [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': task-3272840, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.115640] env[69328]: DEBUG oslo_vmware.api [None req-29c55b94-c329-4ae5-8d18-236afae3d2b5 tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272838, 'name': CreateSnapshot_Task, 'duration_secs': 1.323401} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.115640] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-29c55b94-c329-4ae5-8d18-236afae3d2b5 tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Created Snapshot of the VM instance {{(pid=69328) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 650.116304] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adb48585-5bd2-4f50-868d-bd976c5c9533 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.168102] env[69328]: DEBUG oslo_vmware.api [None req-6ad47de2-9a72-45ee-be43-3ebb4501e24d tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Task: {'id': task-3272841, 'name': PowerOffVM_Task, 'duration_secs': 0.137774} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.168102] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ad47de2-9a72-45ee-be43-3ebb4501e24d tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 650.168102] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6ad47de2-9a72-45ee-be43-3ebb4501e24d tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 650.168102] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3c6285f1-42a5-4e97-a973-9c1d46c15192 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.172385] env[69328]: DEBUG nova.network.neutron [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Successfully created port: 9c9fec0b-e493-4950-9da7-bdb3214def9f {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 650.195170] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6ad47de2-9a72-45ee-be43-3ebb4501e24d tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 650.195417] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6ad47de2-9a72-45ee-be43-3ebb4501e24d tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 650.195629] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ad47de2-9a72-45ee-be43-3ebb4501e24d tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Deleting the datastore file 
[datastore2] f428f9a9-d792-4c1c-b2d4-ea066cc09d67 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 650.195887] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a04887dc-69a6-465d-9978-3652a5755923 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.202344] env[69328]: DEBUG oslo_vmware.api [None req-6ad47de2-9a72-45ee-be43-3ebb4501e24d tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Waiting for the task: (returnval){ [ 650.202344] env[69328]: value = "task-3272843" [ 650.202344] env[69328]: _type = "Task" [ 650.202344] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.210501] env[69328]: DEBUG oslo_vmware.api [None req-6ad47de2-9a72-45ee-be43-3ebb4501e24d tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Task: {'id': task-3272843, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.287011] env[69328]: DEBUG nova.compute.manager [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 650.567508] env[69328]: DEBUG oslo_vmware.api [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': task-3272840, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.634612] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-29c55b94-c329-4ae5-8d18-236afae3d2b5 tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Creating linked-clone VM from snapshot {{(pid=69328) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 650.636982] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-264ccd77-ae6b-4dda-b836-0ba46bdb4cd4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.641040] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9622efdf-fcaa-4f94-8912-7c1bd980bf65 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.662058] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Updating instance 'd37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8' progress to 0 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 650.669311] env[69328]: DEBUG oslo_vmware.api [None req-29c55b94-c329-4ae5-8d18-236afae3d2b5 tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Waiting for the task: (returnval){ [ 650.669311] env[69328]: value = "task-3272844" [ 650.669311] env[69328]: _type = "Task" [ 650.669311] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.678861] env[69328]: DEBUG oslo_vmware.api [None req-29c55b94-c329-4ae5-8d18-236afae3d2b5 tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272844, 'name': CloneVM_Task} progress is 12%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.717189] env[69328]: DEBUG oslo_vmware.api [None req-6ad47de2-9a72-45ee-be43-3ebb4501e24d tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Task: {'id': task-3272843, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.09427} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.717423] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ad47de2-9a72-45ee-be43-3ebb4501e24d tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 650.717605] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6ad47de2-9a72-45ee-be43-3ebb4501e24d tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 650.717779] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6ad47de2-9a72-45ee-be43-3ebb4501e24d tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 650.717944] env[69328]: INFO nova.compute.manager [None req-6ad47de2-9a72-45ee-be43-3ebb4501e24d tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Took 1.08 seconds to destroy the instance on the hypervisor. [ 650.718210] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6ad47de2-9a72-45ee-be43-3ebb4501e24d tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 650.718392] env[69328]: DEBUG nova.compute.manager [-] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 650.718486] env[69328]: DEBUG nova.network.neutron [-] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 650.735248] env[69328]: DEBUG nova.network.neutron [-] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Instance cache missing network info.
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 650.761448] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-908874f1-7cdc-45c4-8369-afc4a42761c7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.768722] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b74bf035-af9c-44f7-b102-f97219f5a62c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.804770] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50d7eec6-f351-4956-9fab-9e6d1f70d160 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.814406] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf0157e4-6859-4ebb-bbde-e9197eea2053 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.830914] env[69328]: DEBUG nova.compute.provider_tree [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 651.070060] env[69328]: DEBUG oslo_vmware.api [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': task-3272840, 'name': PowerOnVM_Task, 'duration_secs': 1.148299} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.070487] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 651.070735] env[69328]: INFO nova.compute.manager [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Took 9.37 seconds to spawn the instance on the hypervisor. 
[ 651.071063] env[69328]: DEBUG nova.compute.manager [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 651.071959] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6df80f49-a9cb-45a2-9d21-5253e8c5607b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.172117] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 651.172599] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9d8a4274-dc1e-47ec-812b-cd2ff87a4ca1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.185656] env[69328]: DEBUG oslo_vmware.api [None req-29c55b94-c329-4ae5-8d18-236afae3d2b5 tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272844, 'name': CloneVM_Task} progress is 94%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.187332] env[69328]: DEBUG oslo_vmware.api [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Waiting for the task: (returnval){ [ 651.187332] env[69328]: value = "task-3272845" [ 651.187332] env[69328]: _type = "Task" [ 651.187332] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.195934] env[69328]: DEBUG oslo_vmware.api [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3272845, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.237496] env[69328]: DEBUG nova.network.neutron [-] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 651.310197] env[69328]: DEBUG nova.compute.manager [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 651.335041] env[69328]: DEBUG nova.scheduler.client.report [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 651.342795] env[69328]: DEBUG nova.virt.hardware [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 651.343135] env[69328]: DEBUG nova.virt.hardware [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 651.343390] env[69328]: DEBUG nova.virt.hardware [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 651.343667] env[69328]: DEBUG nova.virt.hardware [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 651.343887] env[69328]: DEBUG nova.virt.hardware [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 651.344124] env[69328]: DEBUG nova.virt.hardware [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 651.344432] env[69328]: DEBUG nova.virt.hardware [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 651.344674] env[69328]: DEBUG nova.virt.hardware [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 651.344926] env[69328]: DEBUG nova.virt.hardware [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 651.345190] env[69328]: DEBUG nova.virt.hardware [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 651.345455] env[69328]: DEBUG nova.virt.hardware [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 651.346693] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eae0e7b-a261-40cd-b6e6-82d2e93c202f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.358095] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b66c784-bc49-4fb9-b8d8-c995bf538d74 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.594774] env[69328]: INFO nova.compute.manager [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Took 34.78 seconds to build instance. 
[ 651.647146] env[69328]: DEBUG nova.compute.manager [req-08e9aafa-c970-411a-a480-0ebbc2190d46 req-6267484e-ebe9-472d-a2c1-40fec8c80c4a service nova] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Received event network-vif-plugged-9c9fec0b-e493-4950-9da7-bdb3214def9f {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 651.647476] env[69328]: DEBUG oslo_concurrency.lockutils [req-08e9aafa-c970-411a-a480-0ebbc2190d46 req-6267484e-ebe9-472d-a2c1-40fec8c80c4a service nova] Acquiring lock "9753734d-90f0-4661-8029-ec312e88eb60-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 651.647668] env[69328]: DEBUG oslo_concurrency.lockutils [req-08e9aafa-c970-411a-a480-0ebbc2190d46 req-6267484e-ebe9-472d-a2c1-40fec8c80c4a service nova] Lock "9753734d-90f0-4661-8029-ec312e88eb60-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 651.647920] env[69328]: DEBUG oslo_concurrency.lockutils [req-08e9aafa-c970-411a-a480-0ebbc2190d46 req-6267484e-ebe9-472d-a2c1-40fec8c80c4a service nova] Lock "9753734d-90f0-4661-8029-ec312e88eb60-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 651.648582] env[69328]: DEBUG nova.compute.manager [req-08e9aafa-c970-411a-a480-0ebbc2190d46 req-6267484e-ebe9-472d-a2c1-40fec8c80c4a service nova] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] No waiting events found dispatching network-vif-plugged-9c9fec0b-e493-4950-9da7-bdb3214def9f {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 651.648851] env[69328]: WARNING nova.compute.manager [req-08e9aafa-c970-411a-a480-0ebbc2190d46 req-6267484e-ebe9-472d-a2c1-40fec8c80c4a service nova] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Received unexpected event network-vif-plugged-9c9fec0b-e493-4950-9da7-bdb3214def9f for instance with vm_state building and task_state spawning. [ 651.683041] env[69328]: DEBUG oslo_vmware.api [None req-29c55b94-c329-4ae5-8d18-236afae3d2b5 tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272844, 'name': CloneVM_Task} progress is 95%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.696484] env[69328]: DEBUG oslo_vmware.api [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3272845, 'name': PowerOffVM_Task, 'duration_secs': 0.185847} completed successfully.
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.696858] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 651.696975] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Updating instance 'd37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8' progress to 17 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 651.714967] env[69328]: DEBUG nova.network.neutron [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Successfully updated port: 9c9fec0b-e493-4950-9da7-bdb3214def9f {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 651.739868] env[69328]: INFO nova.compute.manager [-] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Took 1.02 seconds to deallocate network for instance. [ 651.852458] env[69328]: DEBUG oslo_concurrency.lockutils [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.581s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 651.853104] env[69328]: DEBUG nova.compute.manager [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 651.856041] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d6a49c30-7556-4612-8fbd-a35acb80e9bd tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.138s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 651.856041] env[69328]: DEBUG nova.objects.instance [None req-d6a49c30-7556-4612-8fbd-a35acb80e9bd tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lazy-loading 'resources' on Instance uuid ed10d511-dbed-4884-8ac6-f737173f62c5 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 652.098035] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a0566077-2533-47fa-9dd6-d84c3d310502 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Lock "e92953f4-b634-4ef9-a5ad-63a886cfa007" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 42.502s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 652.184600] env[69328]: DEBUG oslo_vmware.api [None req-29c55b94-c329-4ae5-8d18-236afae3d2b5 tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272844, 'name': CloneVM_Task, 'duration_secs': 1.425189} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.184600] env[69328]: INFO nova.virt.vmwareapi.vmops [None req-29c55b94-c329-4ae5-8d18-236afae3d2b5 tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Created linked-clone VM from snapshot [ 652.184893] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-219a930f-9aa4-4fde-b34d-a55f7f92552b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.192148] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-29c55b94-c329-4ae5-8d18-236afae3d2b5 tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Uploading image 5565639c-8723-42da-a19f-db38009d07f6 {{(pid=69328) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 652.203169] env[69328]: DEBUG nova.virt.hardware [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:34:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69328)
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 652.203394] env[69328]: DEBUG nova.virt.hardware [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 652.203551] env[69328]: DEBUG nova.virt.hardware [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 652.203728] env[69328]: DEBUG nova.virt.hardware [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 652.203876] env[69328]: DEBUG nova.virt.hardware [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 652.204038] env[69328]: DEBUG nova.virt.hardware [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 652.204418] env[69328]: DEBUG nova.virt.hardware [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 652.204597] env[69328]: DEBUG nova.virt.hardware [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 652.205282] env[69328]: DEBUG nova.virt.hardware [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 652.205282] env[69328]: DEBUG nova.virt.hardware [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 652.205553] env[69328]: DEBUG nova.virt.hardware [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 652.213164] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-32870666-abe5-4c0a-ac76-f378d4981d29 {{(pid=69328) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.227891] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Acquiring lock "refresh_cache-9753734d-90f0-4661-8029-ec312e88eb60" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 652.228049] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Acquired lock "refresh_cache-9753734d-90f0-4661-8029-ec312e88eb60" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 652.228243] env[69328]: DEBUG nova.network.neutron [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 652.234674] env[69328]: DEBUG oslo_vmware.rw_handles [None req-29c55b94-c329-4ae5-8d18-236afae3d2b5 tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 652.234674] env[69328]: value = "vm-653708" [ 652.234674] env[69328]: _type = "VirtualMachine" [ 652.234674] env[69328]: }. {{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 652.234949] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-1e7c1b83-f95b-4c36-a425-d1f59fffbe30 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.238555] env[69328]: DEBUG oslo_vmware.api [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Waiting for the task: (returnval){ [ 652.238555] env[69328]: value = "task-3272846" [ 652.238555] env[69328]: _type = "Task" [ 652.238555] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.244850] env[69328]: DEBUG oslo_vmware.rw_handles [None req-29c55b94-c329-4ae5-8d18-236afae3d2b5 tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Lease: (returnval){ [ 652.244850] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]527f9972-8b19-e80d-d86e-dc613c151123" [ 652.244850] env[69328]: _type = "HttpNfcLease" [ 652.244850] env[69328]: } obtained for exporting VM: (result){ [ 652.244850] env[69328]: value = "vm-653708" [ 652.244850] env[69328]: _type = "VirtualMachine" [ 652.244850] env[69328]: }. 
{{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 652.245192] env[69328]: DEBUG oslo_vmware.api [None req-29c55b94-c329-4ae5-8d18-236afae3d2b5 tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Waiting for the lease: (returnval){ [ 652.245192] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]527f9972-8b19-e80d-d86e-dc613c151123" [ 652.245192] env[69328]: _type = "HttpNfcLease" [ 652.245192] env[69328]: } to be ready. {{(pid=69328) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 652.246017] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6ad47de2-9a72-45ee-be43-3ebb4501e24d tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 652.252303] env[69328]: DEBUG oslo_vmware.api [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3272846, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.256442] env[69328]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 652.256442] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]527f9972-8b19-e80d-d86e-dc613c151123" [ 652.256442] env[69328]: _type = "HttpNfcLease" [ 652.256442] env[69328]: } is ready. {{(pid=69328) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 652.256773] env[69328]: DEBUG oslo_vmware.rw_handles [None req-29c55b94-c329-4ae5-8d18-236afae3d2b5 tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 652.256773] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]527f9972-8b19-e80d-d86e-dc613c151123" [ 652.256773] env[69328]: _type = "HttpNfcLease" [ 652.256773] env[69328]: }. {{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 652.257647] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cf400ae-ceb2-40eb-b443-367e0a94c005 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.267453] env[69328]: DEBUG oslo_vmware.rw_handles [None req-29c55b94-c329-4ae5-8d18-236afae3d2b5 tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d95115-c2fb-a58c-fca8-abb4e6390b5d/disk-0.vmdk from lease info. {{(pid=69328) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 652.267663] env[69328]: DEBUG oslo_vmware.rw_handles [None req-29c55b94-c329-4ae5-8d18-236afae3d2b5 tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d95115-c2fb-a58c-fca8-abb4e6390b5d/disk-0.vmdk for reading. 
{{(pid=69328) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 652.362696] env[69328]: DEBUG nova.compute.utils [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 652.365104] env[69328]: DEBUG nova.compute.manager [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 652.365376] env[69328]: DEBUG nova.network.neutron [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 652.419449] env[69328]: DEBUG nova.policy [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '44b2dc5070104ec48269d8210f0ba2d3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '690511a8725a4dd6ab796a15569293a9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 652.424017] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-58b49aef-dbfe-4127-85f4-7d7793c58a03 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.603020] env[69328]: DEBUG nova.compute.manager [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 652.756220] env[69328]: DEBUG oslo_vmware.api [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3272846, 'name': ReconfigVM_Task, 'duration_secs': 0.210048} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.756763] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Updating instance 'd37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8' progress to 33 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 652.797674] env[69328]: DEBUG nova.network.neutron [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 652.803031] env[69328]: DEBUG nova.network.neutron [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Successfully created port: f659c974-1a37-4e6b-bbff-d8a0858a6756 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 652.867706] env[69328]: DEBUG nova.compute.manager [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 652.946635] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e507b769-8410-40f8-ba7a-3a748202daa6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.955578] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a792464-3549-4005-995f-ae3e68c45ea2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.994381] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-282cc19a-d123-4fb5-883c-36ed7c84aea4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.002920] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a01d4627-c1e6-426f-9af8-fe213abd73a3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.018426] env[69328]: DEBUG nova.compute.provider_tree [None req-d6a49c30-7556-4612-8fbd-a35acb80e9bd tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 653.052081] env[69328]: DEBUG nova.network.neutron [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Updating instance_info_cache with network_info: [{"id": "9c9fec0b-e493-4950-9da7-bdb3214def9f", "address": "fa:16:3e:99:24:e9", "network": {"id": 
"4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.44", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c9fec0b-e4", "ovs_interfaceid": "9c9fec0b-e493-4950-9da7-bdb3214def9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 653.128409] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 653.271390] env[69328]: DEBUG nova.virt.hardware [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 653.271390] env[69328]: DEBUG nova.virt.hardware [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 653.272066] env[69328]: DEBUG nova.virt.hardware [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 653.272066] env[69328]: DEBUG nova.virt.hardware [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 653.272286] env[69328]: DEBUG nova.virt.hardware [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Image pref 
0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 653.272333] env[69328]: DEBUG nova.virt.hardware [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 653.272584] env[69328]: DEBUG nova.virt.hardware [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 653.272801] env[69328]: DEBUG nova.virt.hardware [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 653.273025] env[69328]: DEBUG nova.virt.hardware [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 653.273256] env[69328]: DEBUG nova.virt.hardware [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 653.273586] env[69328]: DEBUG nova.virt.hardware [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 653.280195] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Reconfiguring VM instance instance-00000007 to detach disk 2000 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 653.280590] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c0a27f08-259e-4575-8598-b1bc836f86b1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.305022] env[69328]: DEBUG oslo_vmware.api [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Waiting for the task: (returnval){ [ 653.305022] env[69328]: value = "task-3272848" [ 653.305022] env[69328]: _type = "Task" [ 653.305022] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.312846] env[69328]: DEBUG oslo_vmware.api [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3272848, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.522242] env[69328]: DEBUG nova.scheduler.client.report [None req-d6a49c30-7556-4612-8fbd-a35acb80e9bd tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 653.554457] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Releasing lock "refresh_cache-9753734d-90f0-4661-8029-ec312e88eb60" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 653.555230] env[69328]: DEBUG nova.compute.manager [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Instance network_info: |[{"id": "9c9fec0b-e493-4950-9da7-bdb3214def9f", "address": "fa:16:3e:99:24:e9", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.44", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c9fec0b-e4", "ovs_interfaceid": "9c9fec0b-e493-4950-9da7-bdb3214def9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 653.556110] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:99:24:e9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a8b99a46-3e7f-4ef1-9e45-58e6cd17f210', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9c9fec0b-e493-4950-9da7-bdb3214def9f', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 653.566657] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 653.567324] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 653.567593] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-775d6991-bc27-4ee0-8705-20afb1dc6434 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.588925] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 653.588925] env[69328]: value = "task-3272849" [ 653.588925] env[69328]: _type = "Task" [ 653.588925] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.597546] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272849, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.741407] env[69328]: DEBUG nova.compute.manager [req-918009eb-3347-43fc-8f1f-a83556179396 req-99b4dc96-2376-406b-b5d6-884ae66ba949 service nova] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Received event network-changed-9c9fec0b-e493-4950-9da7-bdb3214def9f {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 653.741407] env[69328]: DEBUG nova.compute.manager [req-918009eb-3347-43fc-8f1f-a83556179396 req-99b4dc96-2376-406b-b5d6-884ae66ba949 service nova] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Refreshing instance network info cache due to event network-changed-9c9fec0b-e493-4950-9da7-bdb3214def9f. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 653.741407] env[69328]: DEBUG oslo_concurrency.lockutils [req-918009eb-3347-43fc-8f1f-a83556179396 req-99b4dc96-2376-406b-b5d6-884ae66ba949 service nova] Acquiring lock "refresh_cache-9753734d-90f0-4661-8029-ec312e88eb60" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.741645] env[69328]: DEBUG oslo_concurrency.lockutils [req-918009eb-3347-43fc-8f1f-a83556179396 req-99b4dc96-2376-406b-b5d6-884ae66ba949 service nova] Acquired lock "refresh_cache-9753734d-90f0-4661-8029-ec312e88eb60" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 653.741740] env[69328]: DEBUG nova.network.neutron [req-918009eb-3347-43fc-8f1f-a83556179396 req-99b4dc96-2376-406b-b5d6-884ae66ba949 service nova] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Refreshing network info cache for port 9c9fec0b-e493-4950-9da7-bdb3214def9f {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 653.813377] env[69328]: DEBUG oslo_vmware.api [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3272848, 'name': ReconfigVM_Task, 'duration_secs': 0.166349} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.813685] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Reconfigured VM instance instance-00000007 to detach disk 2000 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 653.814546] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-399d6560-4ae9-427c-8fa2-2ff8c359f756 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.841939] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Reconfiguring VM instance instance-00000007 to attach disk [datastore1] d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8/d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 653.842411] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bddb1bf6-c00c-4990-a737-67738218c2aa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.863036] env[69328]: DEBUG oslo_vmware.api [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Waiting for the task: (returnval){ [ 653.863036] env[69328]: value = "task-3272850" [ 653.863036] env[69328]: _type = "Task" [ 653.863036] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.871042] env[69328]: DEBUG oslo_vmware.api [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3272850, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.878949] env[69328]: DEBUG nova.compute.manager [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 653.908828] env[69328]: DEBUG nova.virt.hardware [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 653.909051] env[69328]: DEBUG nova.virt.hardware [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 653.909487] env[69328]: DEBUG nova.virt.hardware [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 653.909689] env[69328]: DEBUG nova.virt.hardware [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 653.909852] env[69328]: DEBUG nova.virt.hardware [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 653.910078] env[69328]: DEBUG nova.virt.hardware [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 653.910323] env[69328]: DEBUG nova.virt.hardware [None 
req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 653.910478] env[69328]: DEBUG nova.virt.hardware [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 653.910685] env[69328]: DEBUG nova.virt.hardware [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 653.911937] env[69328]: DEBUG nova.virt.hardware [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 653.911937] env[69328]: DEBUG nova.virt.hardware [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 653.913288] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42bdeb7a-7d64-4bf7-8cb3-075e0b88c272 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.921909] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19f7cd91-5e33-428e-9fb3-8c8a36c7fa7d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.028780] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d6a49c30-7556-4612-8fbd-a35acb80e9bd tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.173s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 654.037237] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e22be9cc-836e-4e91-b58f-4d11cbfc8247 tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.140s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 654.037664] env[69328]: DEBUG nova.objects.instance [None req-e22be9cc-836e-4e91-b58f-4d11cbfc8247 tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Lazy-loading 'resources' on Instance uuid d97dc6d5-e55f-4b9e-91e6-cfdea82f5236 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 654.047902] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f 
tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Acquiring lock "3923403b-2e8f-4033-89ee-9a907aff1d49" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 654.047902] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Lock "3923403b-2e8f-4033-89ee-9a907aff1d49" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 654.063168] env[69328]: INFO nova.scheduler.client.report [None req-d6a49c30-7556-4612-8fbd-a35acb80e9bd tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Deleted allocations for instance ed10d511-dbed-4884-8ac6-f737173f62c5 [ 654.101518] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272849, 'name': CreateVM_Task, 'duration_secs': 0.373066} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.101771] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 654.102528] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.102758] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 654.103346] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 654.103631] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0820c8a-394a-4a88-a875-1035930a6bea {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.109178] env[69328]: DEBUG oslo_vmware.api [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Waiting for the task: (returnval){ [ 654.109178] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a3447e-8494-f4ce-85a8-e6f68037cffa" [ 654.109178] env[69328]: _type = "Task" 
[ 654.109178] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.119355] env[69328]: DEBUG oslo_vmware.api [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a3447e-8494-f4ce-85a8-e6f68037cffa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.379844] env[69328]: DEBUG oslo_vmware.api [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3272850, 'name': ReconfigVM_Task, 'duration_secs': 0.331627} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.380190] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Reconfigured VM instance instance-00000007 to attach disk [datastore1] d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8/d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 654.380537] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Updating instance 'd37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8' progress to 50 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 654.578960] env[69328]: DEBUG nova.network.neutron [req-918009eb-3347-43fc-8f1f-a83556179396 req-99b4dc96-2376-406b-b5d6-884ae66ba949 service nova] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Updated VIF entry in instance network info cache for port 9c9fec0b-e493-4950-9da7-bdb3214def9f. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 654.578960] env[69328]: DEBUG nova.network.neutron [req-918009eb-3347-43fc-8f1f-a83556179396 req-99b4dc96-2376-406b-b5d6-884ae66ba949 service nova] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Updating instance_info_cache with network_info: [{"id": "9c9fec0b-e493-4950-9da7-bdb3214def9f", "address": "fa:16:3e:99:24:e9", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.44", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c9fec0b-e4", "ovs_interfaceid": "9c9fec0b-e493-4950-9da7-bdb3214def9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 654.579559] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d6a49c30-7556-4612-8fbd-a35acb80e9bd tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "ed10d511-dbed-4884-8ac6-f737173f62c5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.916s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 654.625192] env[69328]: DEBUG oslo_vmware.api [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a3447e-8494-f4ce-85a8-e6f68037cffa, 'name': SearchDatastore_Task, 'duration_secs': 0.012076} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.626335] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 654.626335] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 654.626335] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.626335] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 654.626606] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 654.626606] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cda48b1a-c627-40f9-8855-a2ec5289d1ac {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.637947] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 654.638243] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 654.641655] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e22f843c-3150-421e-9c7c-8f87bd9831b7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.650415] env[69328]: DEBUG oslo_vmware.api [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Waiting for the task: (returnval){ [ 654.650415] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]520ea66d-afba-e1f8-e0e0-389dadbdb9b8" [ 654.650415] env[69328]: _type = "Task" [ 654.650415] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.671079] env[69328]: DEBUG oslo_vmware.api [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]520ea66d-afba-e1f8-e0e0-389dadbdb9b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.682174] env[69328]: DEBUG nova.network.neutron [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Successfully updated port: f659c974-1a37-4e6b-bbff-d8a0858a6756 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 654.887808] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d3b585f-ac5c-48fc-a990-b883497ca4fa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.911490] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a30ffaf-4b95-4b63-8fc1-21d2e401d76b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.932431] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Updating instance 'd37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8' progress to 67 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 655.083817] env[69328]: DEBUG oslo_concurrency.lockutils [req-918009eb-3347-43fc-8f1f-a83556179396 req-99b4dc96-2376-406b-b5d6-884ae66ba949 service nova] Releasing lock "refresh_cache-9753734d-90f0-4661-8029-ec312e88eb60" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 655.089408] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-155bca54-0f0a-42ef-9b1a-fe55e7219bd4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.100594] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce97a084-1dc5-43ff-8327-c70717bebd62 {{(pid=69328) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.135458] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43d55ebc-16ab-49c6-befa-1441847a3e95 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.147971] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9591d86a-49a0-4f5b-b337-c59b9f8e4dc9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.164964] env[69328]: DEBUG oslo_vmware.api [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]520ea66d-afba-e1f8-e0e0-389dadbdb9b8, 'name': SearchDatastore_Task, 'duration_secs': 0.012512} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.173980] env[69328]: DEBUG nova.compute.provider_tree [None req-e22be9cc-836e-4e91-b58f-4d11cbfc8247 tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 655.179616] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b382f55-0aa0-44a7-8e11-38a1b3b73c12 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.184874] env[69328]: DEBUG oslo_concurrency.lockutils [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Acquiring lock "refresh_cache-15a8de08-4d20-4329-9867-53e5dff82878" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.185041] env[69328]: DEBUG oslo_concurrency.lockutils [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Acquired lock "refresh_cache-15a8de08-4d20-4329-9867-53e5dff82878" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 655.185200] env[69328]: DEBUG nova.network.neutron [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 655.189148] env[69328]: DEBUG oslo_vmware.api [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Waiting for the task: (returnval){ [ 655.189148] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]524a5787-1c23-798d-6f6b-a520d77630a8" [ 655.189148] env[69328]: _type = "Task" [ 655.189148] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.199653] env[69328]: DEBUG oslo_vmware.api [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]524a5787-1c23-798d-6f6b-a520d77630a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.304912] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "c465c53f-d96b-461b-b8ff-b19929b4f789" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 655.305270] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "c465c53f-d96b-461b-b8ff-b19929b4f789" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 655.499750] env[69328]: DEBUG nova.network.neutron [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Port cbf38f9d-1507-45bb-9684-bf804c86b93b binding to destination host cpu-1 is already ACTIVE {{(pid=69328) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 655.681901] env[69328]: DEBUG nova.scheduler.client.report [None req-e22be9cc-836e-4e91-b58f-4d11cbfc8247 tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 655.704087] env[69328]: DEBUG oslo_vmware.api [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]524a5787-1c23-798d-6f6b-a520d77630a8, 'name': SearchDatastore_Task, 'duration_secs': 0.021828} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.704367] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 655.704643] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 9753734d-90f0-4661-8029-ec312e88eb60/9753734d-90f0-4661-8029-ec312e88eb60.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 655.704922] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d03fb1cc-61f2-4a56-9640-ec68033a26ca {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.714180] env[69328]: DEBUG oslo_vmware.api [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Waiting for the task: (returnval){ [ 655.714180] env[69328]: value = "task-3272851" [ 655.714180] env[69328]: _type = "Task" [ 655.714180] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.723661] env[69328]: DEBUG oslo_vmware.api [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': task-3272851, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.725247] env[69328]: DEBUG nova.network.neutron [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 655.804226] env[69328]: DEBUG nova.compute.manager [req-a871c298-8c5d-4d46-877b-7b9d0b7dc34e req-3f5a2944-1fc7-437f-97e3-42339797e6c4 service nova] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Received event network-vif-plugged-f659c974-1a37-4e6b-bbff-d8a0858a6756 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 655.804467] env[69328]: DEBUG oslo_concurrency.lockutils [req-a871c298-8c5d-4d46-877b-7b9d0b7dc34e req-3f5a2944-1fc7-437f-97e3-42339797e6c4 service nova] Acquiring lock "15a8de08-4d20-4329-9867-53e5dff82878-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 655.804606] env[69328]: DEBUG oslo_concurrency.lockutils [req-a871c298-8c5d-4d46-877b-7b9d0b7dc34e req-3f5a2944-1fc7-437f-97e3-42339797e6c4 service nova] Lock "15a8de08-4d20-4329-9867-53e5dff82878-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 655.804780] env[69328]: DEBUG oslo_concurrency.lockutils [req-a871c298-8c5d-4d46-877b-7b9d0b7dc34e req-3f5a2944-1fc7-437f-97e3-42339797e6c4 service nova] Lock "15a8de08-4d20-4329-9867-53e5dff82878-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 655.804949] env[69328]: DEBUG nova.compute.manager [req-a871c298-8c5d-4d46-877b-7b9d0b7dc34e req-3f5a2944-1fc7-437f-97e3-42339797e6c4 service nova] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] No waiting events found dispatching network-vif-plugged-f659c974-1a37-4e6b-bbff-d8a0858a6756 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 655.805184] env[69328]: WARNING nova.compute.manager [req-a871c298-8c5d-4d46-877b-7b9d0b7dc34e req-3f5a2944-1fc7-437f-97e3-42339797e6c4 service nova] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Received unexpected event network-vif-plugged-f659c974-1a37-4e6b-bbff-d8a0858a6756 for instance with vm_state building and task_state spawning. [ 655.805463] env[69328]: DEBUG nova.compute.manager [req-a871c298-8c5d-4d46-877b-7b9d0b7dc34e req-3f5a2944-1fc7-437f-97e3-42339797e6c4 service nova] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Received event network-changed-f659c974-1a37-4e6b-bbff-d8a0858a6756 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 655.805715] env[69328]: DEBUG nova.compute.manager [req-a871c298-8c5d-4d46-877b-7b9d0b7dc34e req-3f5a2944-1fc7-437f-97e3-42339797e6c4 service nova] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Refreshing instance network info cache due to event network-changed-f659c974-1a37-4e6b-bbff-d8a0858a6756. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 655.805983] env[69328]: DEBUG oslo_concurrency.lockutils [req-a871c298-8c5d-4d46-877b-7b9d0b7dc34e req-3f5a2944-1fc7-437f-97e3-42339797e6c4 service nova] Acquiring lock "refresh_cache-15a8de08-4d20-4329-9867-53e5dff82878" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.890370] env[69328]: DEBUG nova.network.neutron [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Updating instance_info_cache with network_info: [{"id": "f659c974-1a37-4e6b-bbff-d8a0858a6756", "address": "fa:16:3e:73:52:6a", "network": {"id": "032910e6-4d2e-415b-ac3e-ed7a7fadf536", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1432969230-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "690511a8725a4dd6ab796a15569293a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "be5c038c-29e5-43c9-91ab-9eb3094b5337", "external-id": "nsx-vlan-transportzone-511", "segmentation_id": 511, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf659c974-1a", "ovs_interfaceid": "f659c974-1a37-4e6b-bbff-d8a0858a6756", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 656.186773] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e22be9cc-836e-4e91-b58f-4d11cbfc8247 tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.150s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 656.190117] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.113s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 656.192098] env[69328]: INFO nova.compute.claims [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 656.214712] env[69328]: INFO nova.scheduler.client.report [None req-e22be9cc-836e-4e91-b58f-4d11cbfc8247 tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Deleted allocations for instance d97dc6d5-e55f-4b9e-91e6-cfdea82f5236 [ 656.232227] env[69328]: DEBUG oslo_vmware.api [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a 
tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': task-3272851, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.394198] env[69328]: DEBUG oslo_concurrency.lockutils [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Releasing lock "refresh_cache-15a8de08-4d20-4329-9867-53e5dff82878" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 656.395057] env[69328]: DEBUG nova.compute.manager [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Instance network_info: |[{"id": "f659c974-1a37-4e6b-bbff-d8a0858a6756", "address": "fa:16:3e:73:52:6a", "network": {"id": "032910e6-4d2e-415b-ac3e-ed7a7fadf536", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1432969230-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "690511a8725a4dd6ab796a15569293a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "be5c038c-29e5-43c9-91ab-9eb3094b5337", "external-id": "nsx-vlan-transportzone-511", "segmentation_id": 511, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf659c974-1a", "ovs_interfaceid": "f659c974-1a37-4e6b-bbff-d8a0858a6756", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 656.395057] env[69328]: DEBUG oslo_concurrency.lockutils [req-a871c298-8c5d-4d46-877b-7b9d0b7dc34e req-3f5a2944-1fc7-437f-97e3-42339797e6c4 service nova] Acquired lock "refresh_cache-15a8de08-4d20-4329-9867-53e5dff82878" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 656.395277] env[69328]: DEBUG nova.network.neutron [req-a871c298-8c5d-4d46-877b-7b9d0b7dc34e req-3f5a2944-1fc7-437f-97e3-42339797e6c4 service nova] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Refreshing network info cache for port f659c974-1a37-4e6b-bbff-d8a0858a6756 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 656.396343] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:73:52:6a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'be5c038c-29e5-43c9-91ab-9eb3094b5337', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f659c974-1a37-4e6b-bbff-d8a0858a6756', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 656.404522] env[69328]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Creating folder: Project (690511a8725a4dd6ab796a15569293a9). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 656.405014] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-15538532-138b-4a07-ae4c-aa0efe2a72d2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.419105] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Created folder: Project (690511a8725a4dd6ab796a15569293a9) in parent group-v653649. [ 656.419312] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Creating folder: Instances. Parent ref: group-v653710. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 656.419636] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-14ccdb7f-23c1-4785-92c3-17046e6dfc67 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.431750] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Created folder: Instances in parent group-v653710. [ 656.432036] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 656.432272] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 656.432502] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bfcd5346-ccf1-4b1f-8978-fe318fd36bff {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.453651] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 656.453651] env[69328]: value = "task-3272854" [ 656.453651] env[69328]: _type = "Task" [ 656.453651] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.461726] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272854, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.526008] env[69328]: DEBUG oslo_concurrency.lockutils [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquiring lock "d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 656.526272] env[69328]: DEBUG oslo_concurrency.lockutils [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Lock "d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 656.526473] env[69328]: DEBUG oslo_concurrency.lockutils [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Lock "d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 656.728472] env[69328]: DEBUG oslo_vmware.api [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': task-3272851, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.614568} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.728797] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 9753734d-90f0-4661-8029-ec312e88eb60/9753734d-90f0-4661-8029-ec312e88eb60.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 656.728968] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 656.729231] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c8d046c7-ea0b-4779-94ea-bedf9e31aa83 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.736143] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e22be9cc-836e-4e91-b58f-4d11cbfc8247 tempest-ServerExternalEventsTest-1629552772 tempest-ServerExternalEventsTest-1629552772-project-member] Lock "d97dc6d5-e55f-4b9e-91e6-cfdea82f5236" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.752s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 656.738812] env[69328]: DEBUG oslo_vmware.api [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Waiting for the task: (returnval){ [ 656.738812] env[69328]: value = "task-3272855" [ 656.738812] env[69328]: _type = "Task" [ 656.738812] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.748347] env[69328]: DEBUG oslo_vmware.api [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': task-3272855, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.963836] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272854, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.126553] env[69328]: DEBUG nova.network.neutron [req-a871c298-8c5d-4d46-877b-7b9d0b7dc34e req-3f5a2944-1fc7-437f-97e3-42339797e6c4 service nova] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Updated VIF entry in instance network info cache for port f659c974-1a37-4e6b-bbff-d8a0858a6756. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 657.126993] env[69328]: DEBUG nova.network.neutron [req-a871c298-8c5d-4d46-877b-7b9d0b7dc34e req-3f5a2944-1fc7-437f-97e3-42339797e6c4 service nova] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Updating instance_info_cache with network_info: [{"id": "f659c974-1a37-4e6b-bbff-d8a0858a6756", "address": "fa:16:3e:73:52:6a", "network": {"id": "032910e6-4d2e-415b-ac3e-ed7a7fadf536", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1432969230-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "690511a8725a4dd6ab796a15569293a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "be5c038c-29e5-43c9-91ab-9eb3094b5337", "external-id": "nsx-vlan-transportzone-511", "segmentation_id": 511, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf659c974-1a", "ovs_interfaceid": "f659c974-1a37-4e6b-bbff-d8a0858a6756", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 657.253181] env[69328]: DEBUG oslo_vmware.api [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': task-3272855, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.180294} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.253558] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 657.254565] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4de97fc1-5c8b-4a1b-b744-396a755a0712 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.294087] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Reconfiguring VM instance instance-00000014 to attach disk [datastore2] 9753734d-90f0-4661-8029-ec312e88eb60/9753734d-90f0-4661-8029-ec312e88eb60.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 657.298009] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5734b146-dbef-44f1-87c6-435684530094 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.328714] env[69328]: DEBUG oslo_vmware.api [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Waiting for the task: (returnval){ [ 657.328714] env[69328]: value = "task-3272856" [ 657.328714] env[69328]: _type = "Task" [ 657.328714] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.342125] env[69328]: DEBUG oslo_vmware.api [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': task-3272856, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.467015] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272854, 'name': CreateVM_Task, 'duration_secs': 0.680718} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.469642] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 657.470821] env[69328]: DEBUG oslo_concurrency.lockutils [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.470821] env[69328]: DEBUG oslo_concurrency.lockutils [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 657.471187] env[69328]: DEBUG oslo_concurrency.lockutils [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 657.471765] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76ee8494-1dac-4e80-a883-d5e0650e22a8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.477772] env[69328]: DEBUG oslo_vmware.api [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for the task: (returnval){ [ 657.477772] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]525fb940-4a5a-2b4f-7f47-c06dcbd3507a" [ 657.477772] env[69328]: _type = "Task" [ 657.477772] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.486972] env[69328]: DEBUG oslo_vmware.api [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]525fb940-4a5a-2b4f-7f47-c06dcbd3507a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.592586] env[69328]: DEBUG oslo_concurrency.lockutils [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquiring lock "refresh_cache-d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.592787] env[69328]: DEBUG oslo_concurrency.lockutils [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquired lock "refresh_cache-d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 657.592970] env[69328]: DEBUG nova.network.neutron [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 657.632785] env[69328]: DEBUG oslo_concurrency.lockutils [req-a871c298-8c5d-4d46-877b-7b9d0b7dc34e req-3f5a2944-1fc7-437f-97e3-42339797e6c4 service nova] Releasing lock "refresh_cache-15a8de08-4d20-4329-9867-53e5dff82878" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 657.692529] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9897cbb7-2542-48f7-91cc-c243ae04cb08 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.700579] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f96d274d-e42b-496f-b8b0-a4a18434963e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.733675] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4a8b101-870b-47c4-9cbe-55930d1a886f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.744023] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8582eef5-bd8e-4f8b-bf33-628760d9255c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.761523] env[69328]: DEBUG nova.compute.provider_tree [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 657.840664] env[69328]: DEBUG oslo_vmware.api [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': task-3272856, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.988869] env[69328]: DEBUG oslo_vmware.api [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]525fb940-4a5a-2b4f-7f47-c06dcbd3507a, 'name': SearchDatastore_Task, 'duration_secs': 0.011396} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.989217] env[69328]: DEBUG oslo_concurrency.lockutils [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 657.989473] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 657.989716] env[69328]: DEBUG oslo_concurrency.lockutils [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.989861] env[69328]: DEBUG oslo_concurrency.lockutils [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 657.990843] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 657.990843] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5524fb69-9837-466a-bd0c-4adc341494e6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.000565] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 658.000793] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 658.001618] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f355e08c-6127-42fc-a299-24ecac940d3d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.007929] env[69328]: DEBUG oslo_vmware.api [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for the task: (returnval){ [ 658.007929] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52de51a5-c98b-8529-ae46-8e35c3b1822f" [ 658.007929] env[69328]: _type = "Task" [ 658.007929] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.017292] env[69328]: DEBUG oslo_vmware.api [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52de51a5-c98b-8529-ae46-8e35c3b1822f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.264553] env[69328]: DEBUG nova.scheduler.client.report [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 658.340098] env[69328]: DEBUG oslo_vmware.api [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': task-3272856, 'name': ReconfigVM_Task, 'duration_secs': 0.594488} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.340518] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Reconfigured VM instance instance-00000014 to attach disk [datastore2] 9753734d-90f0-4661-8029-ec312e88eb60/9753734d-90f0-4661-8029-ec312e88eb60.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 658.341264] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c08969e7-96e9-434a-a356-7e762afd78a9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.363732] env[69328]: DEBUG oslo_vmware.api [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Waiting for the task: (returnval){ [ 658.363732] env[69328]: value = "task-3272857" [ 658.363732] env[69328]: _type = "Task" [ 658.363732] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.372843] env[69328]: DEBUG oslo_vmware.api [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': task-3272857, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.386625] env[69328]: DEBUG nova.network.neutron [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Updating instance_info_cache with network_info: [{"id": "cbf38f9d-1507-45bb-9684-bf804c86b93b", "address": "fa:16:3e:a1:39:be", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.236", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbf38f9d-15", "ovs_interfaceid": "cbf38f9d-1507-45bb-9684-bf804c86b93b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 658.519474] env[69328]: DEBUG oslo_vmware.api [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52de51a5-c98b-8529-ae46-8e35c3b1822f, 
'name': SearchDatastore_Task, 'duration_secs': 0.010407} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.520393] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0fe71bc-fcb1-430f-bc27-d7f45573fa5c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.527378] env[69328]: DEBUG oslo_vmware.api [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for the task: (returnval){ [ 658.527378] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]527d7fae-030a-3931-a609-2c493677101d" [ 658.527378] env[69328]: _type = "Task" [ 658.527378] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.538706] env[69328]: DEBUG oslo_vmware.api [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]527d7fae-030a-3931-a609-2c493677101d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.770580] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.580s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 658.770925] env[69328]: DEBUG nova.compute.manager [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 658.773602] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0ea93919-e76c-479d-8b58-9211c6c69435 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.114s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 658.773881] env[69328]: DEBUG nova.objects.instance [None req-0ea93919-e76c-479d-8b58-9211c6c69435 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Lazy-loading 'resources' on Instance uuid 88f9f0c2-0c55-45bf-a494-8f1ee4922443 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 658.874989] env[69328]: DEBUG oslo_vmware.api [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': task-3272857, 'name': Rename_Task, 'duration_secs': 0.219306} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.875314] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 658.875510] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-56785299-6528-4acd-9d2e-363778c29511 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.885555] env[69328]: DEBUG oslo_vmware.api [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Waiting for the task: (returnval){ [ 658.885555] env[69328]: value = "task-3272858" [ 658.885555] env[69328]: _type = "Task" [ 658.885555] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.888936] env[69328]: DEBUG oslo_concurrency.lockutils [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Releasing lock "refresh_cache-d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 658.899234] env[69328]: DEBUG oslo_vmware.api [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': task-3272858, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.040481] env[69328]: DEBUG oslo_vmware.api [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]527d7fae-030a-3931-a609-2c493677101d, 'name': SearchDatastore_Task, 'duration_secs': 0.014524} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.040752] env[69328]: DEBUG oslo_concurrency.lockutils [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 659.041221] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 15a8de08-4d20-4329-9867-53e5dff82878/15a8de08-4d20-4329-9867-53e5dff82878.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 659.041349] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c04ecffe-4ae4-4579-9d67-f0f16aa00559 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.051523] env[69328]: DEBUG oslo_vmware.api [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for the task: (returnval){ [ 659.051523] env[69328]: value = "task-3272859" [ 659.051523] env[69328]: _type = "Task" [ 659.051523] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.060882] env[69328]: DEBUG oslo_vmware.api [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3272859, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.279204] env[69328]: DEBUG nova.compute.utils [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 659.281857] env[69328]: DEBUG nova.compute.manager [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 659.282476] env[69328]: DEBUG nova.network.neutron [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 659.400591] env[69328]: DEBUG oslo_vmware.api [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': task-3272858, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.404987] env[69328]: DEBUG nova.policy [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7a36279cfb184fb79ea8f5bd119213b2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '641bd900bcd6477f9ec6a026cf00e42f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 659.424486] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a790e7c4-f1a7-4183-a9ca-534cd5c9446b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.453428] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59eb3a94-097f-4288-a463-33848b03ba90 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.462756] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Updating instance 'd37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8' progress to 83 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 659.562972] env[69328]: DEBUG oslo_vmware.api [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3272859, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.782293] env[69328]: DEBUG nova.compute.manager [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Start building block device mappings for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 659.860676] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75077836-2b15-4cad-87aa-189b20ba69ad {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.869326] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a7f573f-1689-4993-9a0a-ac6d673ce01e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.907236] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5960569b-65ca-4619-a177-61cbb2a32031 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.919766] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b57fb7e7-776e-4d99-bd3e-f4a69744c5e3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.924485] env[69328]: DEBUG oslo_vmware.api [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': task-3272858, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.937194] env[69328]: DEBUG nova.compute.provider_tree [None req-0ea93919-e76c-479d-8b58-9211c6c69435 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 659.969805] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 659.971614] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-64be2ab4-81af-40e3-9314-9fb158cd5f36 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.025292] env[69328]: DEBUG oslo_vmware.api [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Waiting for the task: (returnval){ [ 660.025292] env[69328]: value = "task-3272860" [ 660.025292] env[69328]: _type = "Task" [ 660.025292] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.063785] env[69328]: DEBUG oslo_vmware.api [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3272859, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.171378] env[69328]: DEBUG nova.network.neutron [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Successfully created port: 6a14c441-36e5-4670-8f21-54b1113b23ff {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 660.413434] env[69328]: DEBUG oslo_vmware.api [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': task-3272858, 'name': PowerOnVM_Task, 'duration_secs': 1.229378} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.413716] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 660.413910] env[69328]: INFO nova.compute.manager [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Took 9.10 seconds to spawn the instance on the hypervisor. [ 660.414102] env[69328]: DEBUG nova.compute.manager [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 660.415147] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbb9d8ca-c6db-4c7f-8dfb-0865a3692e96 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.441227] env[69328]: DEBUG nova.scheduler.client.report [None req-0ea93919-e76c-479d-8b58-9211c6c69435 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 660.535117] env[69328]: DEBUG oslo_vmware.api [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3272860, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.564655] env[69328]: DEBUG oslo_vmware.api [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3272859, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.764352] env[69328]: DEBUG oslo_vmware.rw_handles [None req-29c55b94-c329-4ae5-8d18-236afae3d2b5 tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d95115-c2fb-a58c-fca8-abb4e6390b5d/disk-0.vmdk. {{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 660.765439] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-961e2ef0-6536-4de6-be8e-2b20c6234d8f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.776595] env[69328]: DEBUG oslo_vmware.rw_handles [None req-29c55b94-c329-4ae5-8d18-236afae3d2b5 tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d95115-c2fb-a58c-fca8-abb4e6390b5d/disk-0.vmdk is in state: ready. {{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 660.776780] env[69328]: ERROR oslo_vmware.rw_handles [None req-29c55b94-c329-4ae5-8d18-236afae3d2b5 tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d95115-c2fb-a58c-fca8-abb4e6390b5d/disk-0.vmdk due to incomplete transfer. [ 660.777032] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-136e6f80-27b5-46b4-9cd2-57f756d06bb0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.788118] env[69328]: DEBUG oslo_vmware.rw_handles [None req-29c55b94-c329-4ae5-8d18-236afae3d2b5 tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d95115-c2fb-a58c-fca8-abb4e6390b5d/disk-0.vmdk. 
{{(pid=69328) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 660.788571] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-29c55b94-c329-4ae5-8d18-236afae3d2b5 tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Uploaded image 5565639c-8723-42da-a19f-db38009d07f6 to the Glance image server {{(pid=69328) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 660.794613] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-29c55b94-c329-4ae5-8d18-236afae3d2b5 tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Destroying the VM {{(pid=69328) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 660.794613] env[69328]: DEBUG nova.compute.manager [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 660.795571] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-3d08de0d-fb37-4075-8c45-f22ff9134f6c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.805957] env[69328]: DEBUG oslo_vmware.api [None req-29c55b94-c329-4ae5-8d18-236afae3d2b5 tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Waiting for the task: (returnval){ [ 660.805957] env[69328]: value = "task-3272861" [ 660.805957] env[69328]: _type = "Task" [ 660.805957] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.817758] env[69328]: DEBUG oslo_vmware.api [None req-29c55b94-c329-4ae5-8d18-236afae3d2b5 tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272861, 'name': Destroy_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.821230] env[69328]: DEBUG nova.virt.hardware [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 660.821230] env[69328]: DEBUG nova.virt.hardware [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 660.821230] env[69328]: DEBUG nova.virt.hardware [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 660.821416] env[69328]: DEBUG nova.virt.hardware [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 660.821416] env[69328]: DEBUG nova.virt.hardware [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 660.821583] env[69328]: DEBUG nova.virt.hardware [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 660.821865] env[69328]: DEBUG nova.virt.hardware [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 660.822146] env[69328]: DEBUG nova.virt.hardware [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
660.824393] env[69328]: DEBUG nova.virt.hardware [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 660.824393] env[69328]: DEBUG nova.virt.hardware [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 660.824393] env[69328]: DEBUG nova.virt.hardware [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 660.824393] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8c54390-c012-4fb4-9a7c-ae900310a26f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.835191] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f23a0f9-7a98-46b9-abb1-c049c9fd22eb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.939338] env[69328]: INFO nova.compute.manager [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Took 39.30 seconds to build instance. 
[ 660.946757] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0ea93919-e76c-479d-8b58-9211c6c69435 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.173s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 660.949773] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 28.665s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 660.950299] env[69328]: DEBUG nova.objects.instance [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69328) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 660.983323] env[69328]: INFO nova.scheduler.client.report [None req-0ea93919-e76c-479d-8b58-9211c6c69435 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Deleted allocations for instance 88f9f0c2-0c55-45bf-a494-8f1ee4922443 [ 661.037684] env[69328]: DEBUG oslo_vmware.api [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3272860, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.066244] env[69328]: DEBUG oslo_vmware.api [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3272859, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.74215} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.066789] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 15a8de08-4d20-4329-9867-53e5dff82878/15a8de08-4d20-4329-9867-53e5dff82878.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 661.067054] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 661.067366] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5eadee68-a9b5-4476-92f6-f3fab8f2f4c4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.077562] env[69328]: DEBUG oslo_vmware.api [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for the task: (returnval){ [ 661.077562] env[69328]: value = "task-3272862" [ 661.077562] env[69328]: _type = "Task" [ 661.077562] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.100368] env[69328]: DEBUG oslo_vmware.api [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3272862, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.316558] env[69328]: DEBUG oslo_vmware.api [None req-29c55b94-c329-4ae5-8d18-236afae3d2b5 tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272861, 'name': Destroy_Task, 'duration_secs': 0.388583} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.316754] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-29c55b94-c329-4ae5-8d18-236afae3d2b5 tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Destroyed the VM [ 661.317140] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-29c55b94-c329-4ae5-8d18-236afae3d2b5 tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Deleting Snapshot of the VM instance {{(pid=69328) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 661.317414] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-03f4c51c-5f92-4810-baa4-43f912c12d1c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.329543] env[69328]: DEBUG oslo_vmware.api [None req-29c55b94-c329-4ae5-8d18-236afae3d2b5 tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Waiting for the task: (returnval){ [ 661.329543] env[69328]: value = "task-3272863" [ 661.329543] env[69328]: _type = "Task" [ 661.329543] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.342624] env[69328]: DEBUG oslo_vmware.api [None req-29c55b94-c329-4ae5-8d18-236afae3d2b5 tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272863, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.441481] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3b5a6e28-6a94-4a34-9d6e-6410b0f81f2a tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Lock "9753734d-90f0-4661-8029-ec312e88eb60" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.175s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 661.496411] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0ea93919-e76c-479d-8b58-9211c6c69435 tempest-FloatingIPsAssociationNegativeTestJSON-930244320 tempest-FloatingIPsAssociationNegativeTestJSON-930244320-project-member] Lock "88f9f0c2-0c55-45bf-a494-8f1ee4922443" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.938s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 661.538406] env[69328]: DEBUG oslo_vmware.api [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3272860, 'name': PowerOnVM_Task, 'duration_secs': 1.20217} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.538777] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 661.538992] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-91670b81-7bea-44af-aa11-367db8c3b976 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Updating instance 'd37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8' progress to 100 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 661.596886] env[69328]: DEBUG oslo_vmware.api [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3272862, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.09104} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.598134] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 661.599360] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec11dae9-4742-466b-bf07-bd5dc1ddf036 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.604572] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4b14b71-fb47-42fe-be0c-32b77bc5a11b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.614809] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-714834f3-d2c5-4180-b8e5-d96dcaf30522 tempest-ServersAdminNegativeTestJSON-1019803066 tempest-ServersAdminNegativeTestJSON-1019803066-project-admin] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Suspending the VM {{(pid=69328) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 661.627727] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-c438e137-5fb3-46bd-b889-929ed56b4b03 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.645632] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Reconfiguring VM instance instance-00000015 to attach disk [datastore2] 15a8de08-4d20-4329-9867-53e5dff82878/15a8de08-4d20-4329-9867-53e5dff82878.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 661.646585] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-58de7c61-94f5-4b0d-8fde-a40acc3af062 {{(pid=69328) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.669969] env[69328]: DEBUG oslo_vmware.api [None req-714834f3-d2c5-4180-b8e5-d96dcaf30522 tempest-ServersAdminNegativeTestJSON-1019803066 tempest-ServersAdminNegativeTestJSON-1019803066-project-admin] Waiting for the task: (returnval){ [ 661.669969] env[69328]: value = "task-3272864" [ 661.669969] env[69328]: _type = "Task" [ 661.669969] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.671471] env[69328]: DEBUG oslo_vmware.api [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for the task: (returnval){ [ 661.671471] env[69328]: value = "task-3272865" [ 661.671471] env[69328]: _type = "Task" [ 661.671471] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.689741] env[69328]: DEBUG oslo_vmware.api [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3272865, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.689913] env[69328]: DEBUG oslo_vmware.api [None req-714834f3-d2c5-4180-b8e5-d96dcaf30522 tempest-ServersAdminNegativeTestJSON-1019803066 tempest-ServersAdminNegativeTestJSON-1019803066-project-admin] Task: {'id': task-3272864, 'name': SuspendVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.847116] env[69328]: DEBUG oslo_vmware.api [None req-29c55b94-c329-4ae5-8d18-236afae3d2b5 tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272863, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.945929] env[69328]: DEBUG nova.compute.manager [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Starting instance... 
{{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 661.963235] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f3bfa37f-e7ab-4894-836d-28ef08845e83 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 661.964511] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.734s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 661.967523] env[69328]: INFO nova.compute.claims [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 662.192285] env[69328]: DEBUG oslo_vmware.api [None req-714834f3-d2c5-4180-b8e5-d96dcaf30522 tempest-ServersAdminNegativeTestJSON-1019803066 tempest-ServersAdminNegativeTestJSON-1019803066-project-admin] Task: {'id': task-3272864, 'name': SuspendVM_Task} progress is 58%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.192718] env[69328]: DEBUG oslo_vmware.api [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3272865, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.200673] env[69328]: DEBUG nova.compute.manager [req-9d54cfa3-d8ad-461e-bd09-2d28652883c9 req-89218522-ec4a-47ab-adc4-733087aefe95 service nova] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Received event network-vif-plugged-6a14c441-36e5-4670-8f21-54b1113b23ff {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 662.200887] env[69328]: DEBUG oslo_concurrency.lockutils [req-9d54cfa3-d8ad-461e-bd09-2d28652883c9 req-89218522-ec4a-47ab-adc4-733087aefe95 service nova] Acquiring lock "5b0e8bef-dcfc-4c5e-89d2-aa1748050d29-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 662.201157] env[69328]: DEBUG oslo_concurrency.lockutils [req-9d54cfa3-d8ad-461e-bd09-2d28652883c9 req-89218522-ec4a-47ab-adc4-733087aefe95 service nova] Lock "5b0e8bef-dcfc-4c5e-89d2-aa1748050d29-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 662.201401] env[69328]: DEBUG oslo_concurrency.lockutils [req-9d54cfa3-d8ad-461e-bd09-2d28652883c9 req-89218522-ec4a-47ab-adc4-733087aefe95 service nova] Lock "5b0e8bef-dcfc-4c5e-89d2-aa1748050d29-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 662.201497] env[69328]: DEBUG nova.compute.manager [req-9d54cfa3-d8ad-461e-bd09-2d28652883c9 req-89218522-ec4a-47ab-adc4-733087aefe95 service nova] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] No waiting events found dispatching network-vif-plugged-6a14c441-36e5-4670-8f21-54b1113b23ff {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 662.201657] env[69328]: WARNING nova.compute.manager [req-9d54cfa3-d8ad-461e-bd09-2d28652883c9 req-89218522-ec4a-47ab-adc4-733087aefe95 service nova] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Received unexpected event network-vif-plugged-6a14c441-36e5-4670-8f21-54b1113b23ff for instance with vm_state building and task_state spawning. [ 662.343922] env[69328]: DEBUG oslo_vmware.api [None req-29c55b94-c329-4ae5-8d18-236afae3d2b5 tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272863, 'name': RemoveSnapshot_Task, 'duration_secs': 0.703593} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.345531] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-29c55b94-c329-4ae5-8d18-236afae3d2b5 tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Deleted Snapshot of the VM instance {{(pid=69328) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 662.346176] env[69328]: INFO nova.compute.manager [None req-29c55b94-c329-4ae5-8d18-236afae3d2b5 tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Took 14.29 seconds to snapshot the instance on the hypervisor. 
[ 662.407639] env[69328]: DEBUG nova.network.neutron [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Successfully updated port: 6a14c441-36e5-4670-8f21-54b1113b23ff {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 662.474664] env[69328]: DEBUG oslo_concurrency.lockutils [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 662.691490] env[69328]: DEBUG oslo_vmware.api [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3272865, 'name': ReconfigVM_Task, 'duration_secs': 1.016077} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.694873] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Reconfigured VM instance instance-00000015 to attach disk [datastore2] 15a8de08-4d20-4329-9867-53e5dff82878/15a8de08-4d20-4329-9867-53e5dff82878.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 662.697128] env[69328]: DEBUG oslo_vmware.api [None req-714834f3-d2c5-4180-b8e5-d96dcaf30522 tempest-ServersAdminNegativeTestJSON-1019803066 tempest-ServersAdminNegativeTestJSON-1019803066-project-admin] Task: {'id': task-3272864, 'name': SuspendVM_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.697128] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c5bb6ff9-5589-484f-a2ed-703382059fc6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.704194] env[69328]: DEBUG oslo_vmware.api [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for the task: (returnval){ [ 662.704194] env[69328]: value = "task-3272866" [ 662.704194] env[69328]: _type = "Task" [ 662.704194] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.714620] env[69328]: DEBUG oslo_vmware.api [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3272866, 'name': Rename_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.916027] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Acquiring lock "refresh_cache-5b0e8bef-dcfc-4c5e-89d2-aa1748050d29" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.916822] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Acquired lock "refresh_cache-5b0e8bef-dcfc-4c5e-89d2-aa1748050d29" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 662.916822] env[69328]: DEBUG nova.network.neutron [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 663.194309] env[69328]: DEBUG oslo_vmware.api [None req-714834f3-d2c5-4180-b8e5-d96dcaf30522 tempest-ServersAdminNegativeTestJSON-1019803066 tempest-ServersAdminNegativeTestJSON-1019803066-project-admin] Task: {'id': task-3272864, 'name': SuspendVM_Task, 'duration_secs': 1.041348} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.194309] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-714834f3-d2c5-4180-b8e5-d96dcaf30522 tempest-ServersAdminNegativeTestJSON-1019803066 tempest-ServersAdminNegativeTestJSON-1019803066-project-admin] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Suspended the VM {{(pid=69328) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 663.194831] env[69328]: DEBUG nova.compute.manager [None req-714834f3-d2c5-4180-b8e5-d96dcaf30522 tempest-ServersAdminNegativeTestJSON-1019803066 tempest-ServersAdminNegativeTestJSON-1019803066-project-admin] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 663.195327] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5dcc72e-bf79-4935-aca8-cd92dad993aa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.219182] env[69328]: DEBUG oslo_vmware.api [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3272866, 'name': Rename_Task, 'duration_secs': 0.172014} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.219182] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 663.219182] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-65ea2b1c-1817-4727-833a-ceed17cd7d29 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.225846] env[69328]: DEBUG oslo_vmware.api [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for the task: (returnval){ [ 663.225846] env[69328]: value = "task-3272867" [ 663.225846] env[69328]: _type = "Task" [ 663.225846] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.235165] env[69328]: DEBUG oslo_vmware.api [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3272867, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.465055] env[69328]: DEBUG nova.network.neutron [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 663.558640] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e7ce0ab-48fd-465c-a3b9-d87fe33daf49 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.567689] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ba9d606-5004-401a-ab14-6b5d116f79e2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.604278] env[69328]: DEBUG oslo_concurrency.lockutils [None req-90aaef94-d57e-40dc-8d46-6941246a844e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquiring lock "d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 663.604698] env[69328]: DEBUG oslo_concurrency.lockutils [None req-90aaef94-d57e-40dc-8d46-6941246a844e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Lock "d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 663.604889] env[69328]: DEBUG nova.compute.manager [None req-90aaef94-d57e-40dc-8d46-6941246a844e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Going to confirm migration 1 {{(pid=69328) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 663.607720] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33d4cc93-3d2b-48a4-b28a-4cd61b02a792 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.616958] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c62ecd05-2a25-45ca-ba07-fdc46e601e51 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.635614] env[69328]: DEBUG nova.compute.provider_tree [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 663.742419] env[69328]: DEBUG oslo_vmware.api [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3272867, 'name': PowerOnVM_Task, 'duration_secs': 0.487379} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.742727] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 663.742942] env[69328]: INFO nova.compute.manager [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Took 9.86 seconds to spawn the instance on the hypervisor. [ 663.743195] env[69328]: DEBUG nova.compute.manager [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 663.744047] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5404dc00-4a70-4615-9b22-70859f12d1e2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.782135] env[69328]: DEBUG nova.network.neutron [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Updating instance_info_cache with network_info: [{"id": "6a14c441-36e5-4670-8f21-54b1113b23ff", "address": "fa:16:3e:f1:df:d8", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.225", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a14c441-36", "ovs_interfaceid": "6a14c441-36e5-4670-8f21-54b1113b23ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.143061] env[69328]: DEBUG nova.scheduler.client.report [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 664.202965] env[69328]: DEBUG oslo_concurrency.lockutils [None req-90aaef94-d57e-40dc-8d46-6941246a844e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquiring lock "refresh_cache-d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.202965] env[69328]: DEBUG oslo_concurrency.lockutils [None req-90aaef94-d57e-40dc-8d46-6941246a844e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquired lock "refresh_cache-d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 664.202965] env[69328]: DEBUG nova.network.neutron [None req-90aaef94-d57e-40dc-8d46-6941246a844e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 664.202965] env[69328]: DEBUG nova.objects.instance [None req-90aaef94-d57e-40dc-8d46-6941246a844e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Lazy-loading 'info_cache' on Instance uuid d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 664.268086] env[69328]: INFO nova.compute.manager [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Took 42.51 seconds to build instance. 
[ 664.285162] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Releasing lock "refresh_cache-5b0e8bef-dcfc-4c5e-89d2-aa1748050d29" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 664.285640] env[69328]: DEBUG nova.compute.manager [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Instance network_info: |[{"id": "6a14c441-36e5-4670-8f21-54b1113b23ff", "address": "fa:16:3e:f1:df:d8", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.225", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a14c441-36", "ovs_interfaceid": "6a14c441-36e5-4670-8f21-54b1113b23ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 664.286242] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f1:df:d8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a8b99a46-3e7f-4ef1-9e45-58e6cd17f210', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6a14c441-36e5-4670-8f21-54b1113b23ff', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 664.296358] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Creating folder: Project (641bd900bcd6477f9ec6a026cf00e42f). Parent ref: group-v653649. 
{{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 664.297833] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1540e199-6753-4387-a150-1add3ef4244d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.306971] env[69328]: DEBUG nova.compute.manager [req-9ba084fb-6544-474d-9f0d-e56759cea8c7 req-99804230-bb4c-4909-8189-2d85d739c2a3 service nova] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Received event network-changed-6a14c441-36e5-4670-8f21-54b1113b23ff {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 664.307174] env[69328]: DEBUG nova.compute.manager [req-9ba084fb-6544-474d-9f0d-e56759cea8c7 req-99804230-bb4c-4909-8189-2d85d739c2a3 service nova] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Refreshing instance network info cache due to event network-changed-6a14c441-36e5-4670-8f21-54b1113b23ff. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 664.307387] env[69328]: DEBUG oslo_concurrency.lockutils [req-9ba084fb-6544-474d-9f0d-e56759cea8c7 req-99804230-bb4c-4909-8189-2d85d739c2a3 service nova] Acquiring lock "refresh_cache-5b0e8bef-dcfc-4c5e-89d2-aa1748050d29" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.307487] env[69328]: DEBUG oslo_concurrency.lockutils [req-9ba084fb-6544-474d-9f0d-e56759cea8c7 req-99804230-bb4c-4909-8189-2d85d739c2a3 service nova] Acquired lock "refresh_cache-5b0e8bef-dcfc-4c5e-89d2-aa1748050d29" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 664.307641] env[69328]: DEBUG nova.network.neutron [req-9ba084fb-6544-474d-9f0d-e56759cea8c7 req-99804230-bb4c-4909-8189-2d85d739c2a3 service nova] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Refreshing network info cache for port 6a14c441-36e5-4670-8f21-54b1113b23ff {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 664.315095] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Created folder: Project (641bd900bcd6477f9ec6a026cf00e42f) in parent group-v653649. [ 664.315304] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Creating folder: Instances. Parent ref: group-v653713. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 664.316601] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3ca550f0-bff3-417b-aa74-bc8e061fab84 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.331052] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Created folder: Instances in parent group-v653713. [ 664.331283] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 664.331489] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 664.333602] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b8861b4a-4d9d-4433-b614-8dc604bc9d45 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.356805] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 664.356805] env[69328]: value = "task-3272870" [ 664.356805] env[69328]: _type = "Task" [ 664.356805] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.370311] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272870, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.652650] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.688s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 664.653263] env[69328]: DEBUG nova.compute.manager [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 664.656801] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.822s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 664.660852] env[69328]: INFO nova.compute.claims [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 664.770707] env[69328]: DEBUG oslo_concurrency.lockutils [None req-44e0742a-6c68-43e3-994e-86d801dabd64 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Lock "15a8de08-4d20-4329-9867-53e5dff82878" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.553s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 664.866523] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272870, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.081146] env[69328]: DEBUG nova.network.neutron [req-9ba084fb-6544-474d-9f0d-e56759cea8c7 req-99804230-bb4c-4909-8189-2d85d739c2a3 service nova] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Updated VIF entry in instance network info cache for port 6a14c441-36e5-4670-8f21-54b1113b23ff. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 665.081656] env[69328]: DEBUG nova.network.neutron [req-9ba084fb-6544-474d-9f0d-e56759cea8c7 req-99804230-bb4c-4909-8189-2d85d739c2a3 service nova] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Updating instance_info_cache with network_info: [{"id": "6a14c441-36e5-4670-8f21-54b1113b23ff", "address": "fa:16:3e:f1:df:d8", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.225", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a14c441-36", "ovs_interfaceid": "6a14c441-36e5-4670-8f21-54b1113b23ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.166908] env[69328]: DEBUG nova.compute.utils [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 665.172621] env[69328]: DEBUG nova.compute.manager [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 665.175790] env[69328]: DEBUG nova.network.neutron [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 665.268870] env[69328]: DEBUG nova.policy [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8a33685c1b8e4b4a91cdfea0583ad5a4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b6c6d4b6915e4660b1ba7704912654c1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 665.273838] env[69328]: DEBUG nova.compute.manager [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 665.371091] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272870, 'name': CreateVM_Task, 'duration_secs': 0.532845} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.371440] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 665.373023] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.373023] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 665.373023] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 665.373023] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d886ae30-4e19-4244-81aa-b1fdae4fd545 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
665.379224] env[69328]: DEBUG oslo_vmware.api [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Waiting for the task: (returnval){ [ 665.379224] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5222270f-6f77-9857-1af6-5d152b6449b0" [ 665.379224] env[69328]: _type = "Task" [ 665.379224] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.391600] env[69328]: DEBUG oslo_vmware.api [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5222270f-6f77-9857-1af6-5d152b6449b0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.481521] env[69328]: DEBUG nova.network.neutron [None req-90aaef94-d57e-40dc-8d46-6941246a844e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Updating instance_info_cache with network_info: [{"id": "cbf38f9d-1507-45bb-9684-bf804c86b93b", "address": "fa:16:3e:a1:39:be", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.236", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbf38f9d-15", "ovs_interfaceid": "cbf38f9d-1507-45bb-9684-bf804c86b93b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.544102] env[69328]: INFO nova.compute.manager [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Rescuing [ 665.544376] env[69328]: DEBUG oslo_concurrency.lockutils [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Acquiring lock "refresh_cache-15a8de08-4d20-4329-9867-53e5dff82878" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.544529] env[69328]: DEBUG oslo_concurrency.lockutils [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Acquired lock "refresh_cache-15a8de08-4d20-4329-9867-53e5dff82878" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 665.544691] env[69328]: DEBUG nova.network.neutron 
[None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 665.584912] env[69328]: DEBUG oslo_concurrency.lockutils [req-9ba084fb-6544-474d-9f0d-e56759cea8c7 req-99804230-bb4c-4909-8189-2d85d739c2a3 service nova] Releasing lock "refresh_cache-5b0e8bef-dcfc-4c5e-89d2-aa1748050d29" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 665.624278] env[69328]: DEBUG nova.compute.manager [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 665.625525] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa1d79ae-c7d8-4b00-be06-131c3406a7cc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.678505] env[69328]: DEBUG nova.compute.manager [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 665.745751] env[69328]: DEBUG nova.network.neutron [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Successfully created port: eb83e86c-619b-4c67-a535-7ecc49d15ff2 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 665.794219] env[69328]: DEBUG oslo_concurrency.lockutils [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 665.892336] env[69328]: DEBUG oslo_vmware.api [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5222270f-6f77-9857-1af6-5d152b6449b0, 'name': SearchDatastore_Task, 'duration_secs': 0.034061} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.895035] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 665.895268] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 665.895501] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.895646] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 665.895817] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 665.896966] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-95ef037b-4157-4fa6-9a3a-822f79419e22 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.907533] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 665.908425] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 665.911443] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aaffb3dc-01b8-4d53-a3c8-13c1ef9e7686 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.918300] env[69328]: DEBUG oslo_vmware.api [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Waiting for the task: (returnval){ [ 665.918300] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52036ea4-04d9-ed6b-8951-43f7c88c8aa9" [ 665.918300] env[69328]: _type = "Task" [ 665.918300] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.927825] env[69328]: DEBUG oslo_vmware.api [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52036ea4-04d9-ed6b-8951-43f7c88c8aa9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.987846] env[69328]: DEBUG oslo_concurrency.lockutils [None req-90aaef94-d57e-40dc-8d46-6941246a844e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Releasing lock "refresh_cache-d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 665.987846] env[69328]: DEBUG nova.objects.instance [None req-90aaef94-d57e-40dc-8d46-6941246a844e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Lazy-loading 'migration_context' on Instance uuid d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 666.138898] env[69328]: INFO nova.compute.manager [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] instance snapshotting [ 666.144020] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac86e5d4-38cd-41b6-93c7-5e0a24a03a7f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.182916] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bbc9d7a-6983-4fad-a8a7-a2005e0b68a3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.264171] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5f8b477-7c23-4827-a007-649c252d9b50 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.273633] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b05dd63-85c3-4840-beb3-3e8f06210429 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.312180] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-3269235f-a588-4bef-a234-54b957efe6cb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.320905] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ebcc3ca-45c5-4a29-b0c9-a47c7c5f8ece {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.338043] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Acquiring lock "9753734d-90f0-4661-8029-ec312e88eb60" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 666.338179] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Lock "9753734d-90f0-4661-8029-ec312e88eb60" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 666.338391] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Acquiring lock "9753734d-90f0-4661-8029-ec312e88eb60-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 666.338594] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Lock "9753734d-90f0-4661-8029-ec312e88eb60-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 666.338767] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Lock "9753734d-90f0-4661-8029-ec312e88eb60-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 666.340779] env[69328]: DEBUG nova.compute.provider_tree [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 666.342167] env[69328]: INFO nova.compute.manager [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Terminating instance [ 666.397278] env[69328]: DEBUG nova.network.neutron [None 
req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Updating instance_info_cache with network_info: [{"id": "f659c974-1a37-4e6b-bbff-d8a0858a6756", "address": "fa:16:3e:73:52:6a", "network": {"id": "032910e6-4d2e-415b-ac3e-ed7a7fadf536", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1432969230-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "690511a8725a4dd6ab796a15569293a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "be5c038c-29e5-43c9-91ab-9eb3094b5337", "external-id": "nsx-vlan-transportzone-511", "segmentation_id": 511, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf659c974-1a", "ovs_interfaceid": "f659c974-1a37-4e6b-bbff-d8a0858a6756", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 666.429690] env[69328]: DEBUG oslo_vmware.api [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52036ea4-04d9-ed6b-8951-43f7c88c8aa9, 'name': SearchDatastore_Task, 'duration_secs': 0.017274} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.430559] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-872aef7c-c2d4-495a-98bb-ef9cdf6436b8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.436244] env[69328]: DEBUG oslo_vmware.api [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Waiting for the task: (returnval){ [ 666.436244] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5219b071-a005-e740-a2d7-49f20c4baeee" [ 666.436244] env[69328]: _type = "Task" [ 666.436244] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.445819] env[69328]: DEBUG oslo_vmware.api [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5219b071-a005-e740-a2d7-49f20c4baeee, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.489810] env[69328]: DEBUG nova.objects.base [None req-90aaef94-d57e-40dc-8d46-6941246a844e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=69328) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 666.490795] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b32a38a3-a6ee-4f3b-8c36-7d0b85f2e99f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.512968] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbe1b01c-78e4-46ee-ae80-c5f363b49364 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.519772] env[69328]: DEBUG oslo_vmware.api [None req-90aaef94-d57e-40dc-8d46-6941246a844e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Waiting for the task: (returnval){ [ 666.519772] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]526d409b-9f4e-a973-11a5-ebb7ba440144" [ 666.519772] env[69328]: _type = "Task" [ 666.519772] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.529196] env[69328]: DEBUG oslo_vmware.api [None req-90aaef94-d57e-40dc-8d46-6941246a844e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]526d409b-9f4e-a973-11a5-ebb7ba440144, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.692501] env[69328]: DEBUG nova.compute.manager [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 666.703269] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Creating Snapshot of the VM instance {{(pid=69328) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 666.703269] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-5b02acfc-f3e3-4a19-83d4-25aad506ab57 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.714338] env[69328]: DEBUG nova.virt.hardware [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 666.714637] env[69328]: DEBUG nova.virt.hardware [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 666.714839] env[69328]: DEBUG nova.virt.hardware [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 666.715116] env[69328]: DEBUG nova.virt.hardware [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 666.716031] env[69328]: DEBUG nova.virt.hardware [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 666.716031] env[69328]: DEBUG nova.virt.hardware [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 666.716396] env[69328]: DEBUG nova.virt.hardware [None 
req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 666.716635] env[69328]: DEBUG nova.virt.hardware [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 666.716894] env[69328]: DEBUG nova.virt.hardware [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 666.717128] env[69328]: DEBUG nova.virt.hardware [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 666.717429] env[69328]: DEBUG nova.virt.hardware [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 666.718440] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2366606-0728-4ee1-8ebe-cc3feaa851b1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.722776] env[69328]: DEBUG oslo_vmware.api [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Waiting for the task: (returnval){ [ 666.722776] env[69328]: value = "task-3272875" [ 666.722776] env[69328]: _type = "Task" [ 666.722776] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.731388] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de0c037a-3576-4c58-a975-e01830016a1d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.739633] env[69328]: DEBUG oslo_vmware.api [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272875, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.849951] env[69328]: DEBUG nova.scheduler.client.report [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 666.856026] env[69328]: DEBUG nova.compute.manager [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 666.856463] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 666.859797] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7188ecc3-a341-4ae6-9f6f-addc398441fa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.871062] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 666.871860] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3a33e1e6-b20f-4d11-aee4-ad9ee0f5ab2e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.900934] env[69328]: DEBUG oslo_concurrency.lockutils [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Releasing lock "refresh_cache-15a8de08-4d20-4329-9867-53e5dff82878" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 666.949473] env[69328]: DEBUG oslo_vmware.api [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5219b071-a005-e740-a2d7-49f20c4baeee, 'name': SearchDatastore_Task, 'duration_secs': 0.014973} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.951042] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 666.951355] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29/5b0e8bef-dcfc-4c5e-89d2-aa1748050d29.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 666.951697] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 666.951881] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 666.952062] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Deleting the datastore file [datastore2] 9753734d-90f0-4661-8029-ec312e88eb60 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 666.952297] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-431d6d46-28dd-4850-a010-f26e25d192ed {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.954485] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-59a9eabd-b294-4ab5-a08b-99dcfcfb3876 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.962282] env[69328]: DEBUG oslo_vmware.api [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Waiting for the task: (returnval){ [ 666.962282] env[69328]: value = "task-3272878" [ 666.962282] env[69328]: _type = "Task" [ 666.962282] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.963551] env[69328]: DEBUG oslo_vmware.api [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Waiting for the task: (returnval){ [ 666.963551] env[69328]: value = "task-3272877" [ 666.963551] env[69328]: _type = "Task" [ 666.963551] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.977229] env[69328]: DEBUG oslo_vmware.api [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272877, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.980902] env[69328]: DEBUG oslo_vmware.api [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': task-3272878, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.032448] env[69328]: DEBUG oslo_vmware.api [None req-90aaef94-d57e-40dc-8d46-6941246a844e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]526d409b-9f4e-a973-11a5-ebb7ba440144, 'name': SearchDatastore_Task, 'duration_secs': 0.00854} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.032938] env[69328]: DEBUG oslo_concurrency.lockutils [None req-90aaef94-d57e-40dc-8d46-6941246a844e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 667.237079] env[69328]: DEBUG oslo_vmware.api [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272875, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.360030] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.702s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 667.360030] env[69328]: DEBUG nova.compute.manager [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 667.362292] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.363s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 667.367427] env[69328]: INFO nova.compute.claims [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 667.402786] env[69328]: DEBUG nova.network.neutron [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Successfully updated port: eb83e86c-619b-4c67-a535-7ecc49d15ff2 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 667.422021] env[69328]: DEBUG nova.compute.manager [req-9cde99c8-0d58-4442-b667-3f1a999bdc8f req-5fd7eb18-10a3-43d6-81a6-8b76f6f7fb56 service nova] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Received event network-vif-plugged-eb83e86c-619b-4c67-a535-7ecc49d15ff2 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 667.426208] env[69328]: DEBUG oslo_concurrency.lockutils [req-9cde99c8-0d58-4442-b667-3f1a999bdc8f req-5fd7eb18-10a3-43d6-81a6-8b76f6f7fb56 service nova] Acquiring lock "84baf472-6eb5-4c92-98eb-e35c14bca4e2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 667.426208] env[69328]: DEBUG oslo_concurrency.lockutils [req-9cde99c8-0d58-4442-b667-3f1a999bdc8f req-5fd7eb18-10a3-43d6-81a6-8b76f6f7fb56 service nova] Lock "84baf472-6eb5-4c92-98eb-e35c14bca4e2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 667.426208] env[69328]: DEBUG oslo_concurrency.lockutils [req-9cde99c8-0d58-4442-b667-3f1a999bdc8f req-5fd7eb18-10a3-43d6-81a6-8b76f6f7fb56 service nova] Lock "84baf472-6eb5-4c92-98eb-e35c14bca4e2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 667.426208] env[69328]: DEBUG nova.compute.manager [req-9cde99c8-0d58-4442-b667-3f1a999bdc8f req-5fd7eb18-10a3-43d6-81a6-8b76f6f7fb56 service nova] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] No waiting events found dispatching network-vif-plugged-eb83e86c-619b-4c67-a535-7ecc49d15ff2 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 667.426208] env[69328]: WARNING nova.compute.manager [req-9cde99c8-0d58-4442-b667-3f1a999bdc8f req-5fd7eb18-10a3-43d6-81a6-8b76f6f7fb56 service nova] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Received unexpected event network-vif-plugged-eb83e86c-619b-4c67-a535-7ecc49d15ff2 for instance with vm_state building and task_state spawning. 
[ 667.479493] env[69328]: DEBUG oslo_vmware.api [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272877, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.483442] env[69328]: DEBUG oslo_vmware.api [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': task-3272878, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.195132} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.483830] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 667.484061] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 667.484411] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 667.484647] env[69328]: INFO nova.compute.manager [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Took 0.63 seconds to destroy the instance on the hypervisor. [ 667.484995] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 667.485238] env[69328]: DEBUG nova.compute.manager [-] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 667.485336] env[69328]: DEBUG nova.network.neutron [-] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 667.736679] env[69328]: DEBUG oslo_vmware.api [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272875, 'name': CreateSnapshot_Task, 'duration_secs': 0.68078} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.737010] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Created Snapshot of the VM instance {{(pid=69328) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 667.737930] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4891e3ee-cabf-4f66-a5d1-728cd7473e2e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.869898] env[69328]: DEBUG nova.compute.utils [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 667.874091] env[69328]: DEBUG nova.compute.manager [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 667.874271] env[69328]: DEBUG nova.network.neutron [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 667.905730] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Acquiring lock "refresh_cache-84baf472-6eb5-4c92-98eb-e35c14bca4e2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 667.932475] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Acquired lock "refresh_cache-84baf472-6eb5-4c92-98eb-e35c14bca4e2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 667.932475] env[69328]: DEBUG nova.network.neutron [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 667.932475] env[69328]: DEBUG nova.policy [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '929ab12fcdb943a48039c7508e6a0b35', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '088bc9e3aeb449baa0a522342d57d183', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 
'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 667.991025] env[69328]: DEBUG oslo_vmware.api [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272877, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.61779} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.991577] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29/5b0e8bef-dcfc-4c5e-89d2-aa1748050d29.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 667.991854] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 667.992149] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-70ac518c-f4ab-4dfd-a5a5-198bc4b01bc4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.005645] env[69328]: DEBUG oslo_vmware.api [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Waiting for the task: (returnval){ [ 668.005645] env[69328]: value = "task-3272880" [ 668.005645] env[69328]: _type = "Task" [ 668.005645] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.019542] env[69328]: DEBUG oslo_vmware.api [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272880, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.257020] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Creating linked-clone VM from snapshot {{(pid=69328) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 668.257365] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-c8a88f6a-f6ae-46d1-8d80-e800c733830d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.267816] env[69328]: DEBUG oslo_vmware.api [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Waiting for the task: (returnval){ [ 668.267816] env[69328]: value = "task-3272881" [ 668.267816] env[69328]: _type = "Task" [ 668.267816] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.277762] env[69328]: DEBUG oslo_vmware.api [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272881, 'name': CloneVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.290805] env[69328]: DEBUG nova.network.neutron [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Successfully created port: e8f19fa7-2ac8-47ea-bd97-b8bcdc8f3ed6 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 668.299411] env[69328]: DEBUG nova.network.neutron [-] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 668.380608] env[69328]: DEBUG nova.compute.manager [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Start building block device mappings for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 668.444693] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 668.445112] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ad4aa3f2-579b-40da-a881-3dcf855712da {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.454284] env[69328]: DEBUG oslo_vmware.api [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for the task: (returnval){ [ 668.454284] env[69328]: value = "task-3272882" [ 668.454284] env[69328]: _type = "Task" [ 668.454284] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.458285] env[69328]: DEBUG nova.network.neutron [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 668.475980] env[69328]: DEBUG oslo_vmware.api [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3272882, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.518299] env[69328]: DEBUG oslo_vmware.api [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272880, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092261} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.521364] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 668.527148] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80584929-ffc7-4ac8-b369-eb57bab82ade {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.554743] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Reconfiguring VM instance instance-00000016 to attach disk [datastore2] 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29/5b0e8bef-dcfc-4c5e-89d2-aa1748050d29.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 668.558075] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b00d8ef0-da65-4faf-95a9-a8b8caf21280 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.582566] env[69328]: DEBUG oslo_vmware.api [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Waiting for the task: (returnval){ [ 668.582566] env[69328]: value = "task-3272883" [ 668.582566] env[69328]: _type = "Task" [ 668.582566] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.591430] env[69328]: DEBUG oslo_vmware.api [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272883, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.702363] env[69328]: DEBUG nova.network.neutron [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Updating instance_info_cache with network_info: [{"id": "eb83e86c-619b-4c67-a535-7ecc49d15ff2", "address": "fa:16:3e:14:ca:62", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.209", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb83e86c-61", "ovs_interfaceid": "eb83e86c-619b-4c67-a535-7ecc49d15ff2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 668.778821] env[69328]: DEBUG oslo_vmware.api [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272881, 'name': CloneVM_Task} progress is 94%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.801336] env[69328]: INFO nova.compute.manager [-] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Took 1.32 seconds to deallocate network for instance. [ 668.919056] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c2635a7-c44a-4b55-85fd-c78e11763bc9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.927754] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1150acef-59bd-4f74-bdde-32f61d7ed2c3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.963720] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-208f94f3-61c8-4e62-9f1c-abd0e0b6c748 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.971233] env[69328]: DEBUG oslo_vmware.api [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3272882, 'name': PowerOffVM_Task, 'duration_secs': 0.252204} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.973488] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 668.974326] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6624142-8d7a-433e-b5f9-a21a2f7614d0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.977868] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4325491f-80a1-464e-b069-fbf352ccc481 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.006922] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c04a0e1-910a-439b-b972-e230e195438a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.009933] env[69328]: DEBUG nova.compute.provider_tree [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 669.036889] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 669.036986] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e92d64b1-b226-4684-8683-acfdd313c0be {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.045150] env[69328]: DEBUG oslo_vmware.api [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for the task: (returnval){ [ 669.045150] env[69328]: value = "task-3272884" [ 669.045150] env[69328]: _type = "Task" [ 669.045150] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.053446] env[69328]: DEBUG oslo_vmware.api [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3272884, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.095240] env[69328]: DEBUG oslo_vmware.api [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272883, 'name': ReconfigVM_Task, 'duration_secs': 0.325391} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.095569] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Reconfigured VM instance instance-00000016 to attach disk [datastore2] 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29/5b0e8bef-dcfc-4c5e-89d2-aa1748050d29.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 669.096289] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2e4e59ff-7274-4c58-a3ec-1b8bb93a23e0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.103070] env[69328]: DEBUG oslo_vmware.api [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Waiting for the task: (returnval){ [ 669.103070] env[69328]: value = "task-3272885" [ 669.103070] env[69328]: _type = "Task" [ 669.103070] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.111843] env[69328]: DEBUG oslo_vmware.api [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272885, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.204265] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Releasing lock "refresh_cache-84baf472-6eb5-4c92-98eb-e35c14bca4e2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 669.204728] env[69328]: DEBUG nova.compute.manager [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Instance network_info: |[{"id": "eb83e86c-619b-4c67-a535-7ecc49d15ff2", "address": "fa:16:3e:14:ca:62", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.209", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb83e86c-61", "ovs_interfaceid": "eb83e86c-619b-4c67-a535-7ecc49d15ff2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 669.205343] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:14:ca:62', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a8b99a46-3e7f-4ef1-9e45-58e6cd17f210', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eb83e86c-619b-4c67-a535-7ecc49d15ff2', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 669.214476] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Creating folder: Project (b6c6d4b6915e4660b1ba7704912654c1). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 669.214824] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c2eae425-3112-4979-bb70-b8fd342e6d94 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.226121] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Created folder: Project (b6c6d4b6915e4660b1ba7704912654c1) in parent group-v653649. [ 669.226376] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Creating folder: Instances. Parent ref: group-v653721. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 669.226667] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c75aa308-1ace-430a-a227-7bd93b3ea061 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.236189] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Created folder: Instances in parent group-v653721. [ 669.236439] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 669.236635] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 669.236860] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a08d5169-e309-448a-a37e-ca26ac8ab5a8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.259024] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 669.259024] env[69328]: value = "task-3272888" [ 669.259024] env[69328]: _type = "Task" [ 669.259024] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.264062] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272888, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.277915] env[69328]: DEBUG oslo_vmware.api [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272881, 'name': CloneVM_Task} progress is 94%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.313903] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 669.389519] env[69328]: DEBUG nova.compute.manager [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 669.410496] env[69328]: DEBUG nova.virt.hardware [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 669.410632] env[69328]: DEBUG nova.virt.hardware [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 669.410829] env[69328]: DEBUG nova.virt.hardware [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 669.411028] env[69328]: DEBUG nova.virt.hardware [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 669.411115] env[69328]: DEBUG nova.virt.hardware [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 669.411260] env[69328]: DEBUG nova.virt.hardware [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 669.411586] env[69328]: DEBUG nova.virt.hardware [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 669.414709] env[69328]: DEBUG nova.virt.hardware [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 669.414709] 
env[69328]: DEBUG nova.virt.hardware [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 669.414709] env[69328]: DEBUG nova.virt.hardware [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 669.414709] env[69328]: DEBUG nova.virt.hardware [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 669.414709] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f62709f0-e6e0-4171-96b3-8653e1282915 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.421730] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f1f12e4-af9b-4406-9d13-3e0cdbe2e4fa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.513632] env[69328]: DEBUG nova.scheduler.client.report [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 669.536497] env[69328]: DEBUG nova.compute.manager [req-42dd9251-a527-405c-822b-0c8e14e7a0f6 req-dffe6145-a594-4401-8d32-0624d3282a1d service nova] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Received event network-changed-eb83e86c-619b-4c67-a535-7ecc49d15ff2 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 669.537097] env[69328]: DEBUG nova.compute.manager [req-42dd9251-a527-405c-822b-0c8e14e7a0f6 req-dffe6145-a594-4401-8d32-0624d3282a1d service nova] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Refreshing instance network info cache due to event network-changed-eb83e86c-619b-4c67-a535-7ecc49d15ff2. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 669.537097] env[69328]: DEBUG oslo_concurrency.lockutils [req-42dd9251-a527-405c-822b-0c8e14e7a0f6 req-dffe6145-a594-4401-8d32-0624d3282a1d service nova] Acquiring lock "refresh_cache-84baf472-6eb5-4c92-98eb-e35c14bca4e2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.537097] env[69328]: DEBUG oslo_concurrency.lockutils [req-42dd9251-a527-405c-822b-0c8e14e7a0f6 req-dffe6145-a594-4401-8d32-0624d3282a1d service nova] Acquired lock "refresh_cache-84baf472-6eb5-4c92-98eb-e35c14bca4e2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 669.537265] env[69328]: DEBUG nova.network.neutron [req-42dd9251-a527-405c-822b-0c8e14e7a0f6 req-dffe6145-a594-4401-8d32-0624d3282a1d service nova] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Refreshing network info cache for port eb83e86c-619b-4c67-a535-7ecc49d15ff2 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 669.556710] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] VM already powered off {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 669.556925] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 669.557184] env[69328]: DEBUG oslo_concurrency.lockutils [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.557340] env[69328]: DEBUG oslo_concurrency.lockutils [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 669.557518] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 669.557758] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1c2238f3-4b55-4953-8536-6f1006e0d985 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.567037] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Created directory 
with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 669.567231] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 669.567948] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fbf73e97-1bec-4c59-be05-d8351252313d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.574221] env[69328]: DEBUG oslo_vmware.api [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for the task: (returnval){ [ 669.574221] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5269e63e-5068-29e1-bdf0-e5e1a4606ed4" [ 669.574221] env[69328]: _type = "Task" [ 669.574221] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.582349] env[69328]: DEBUG oslo_vmware.api [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5269e63e-5068-29e1-bdf0-e5e1a4606ed4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.613387] env[69328]: DEBUG oslo_vmware.api [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272885, 'name': Rename_Task, 'duration_secs': 0.138488} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.613684] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 669.613957] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5e5f4d55-cd42-4d6c-ba3b-2162a5b8e907 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.622198] env[69328]: DEBUG oslo_vmware.api [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Waiting for the task: (returnval){ [ 669.622198] env[69328]: value = "task-3272890" [ 669.622198] env[69328]: _type = "Task" [ 669.622198] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.631583] env[69328]: DEBUG oslo_vmware.api [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272890, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.766103] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272888, 'name': CreateVM_Task} progress is 25%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.778974] env[69328]: DEBUG oslo_vmware.api [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272881, 'name': CloneVM_Task} progress is 95%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.019307] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.657s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 670.019841] env[69328]: DEBUG nova.compute.manager [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 670.023086] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.402s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 670.024737] env[69328]: INFO nova.compute.claims [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 670.091789] env[69328]: DEBUG oslo_vmware.api [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5269e63e-5068-29e1-bdf0-e5e1a4606ed4, 'name': SearchDatastore_Task, 'duration_secs': 0.009745} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.093606] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c82f143-8c2a-439d-9c81-a4cac6201e8c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.104324] env[69328]: DEBUG oslo_vmware.api [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for the task: (returnval){ [ 670.104324] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]523ccaac-cc20-c515-2f86-d77d813f8237" [ 670.104324] env[69328]: _type = "Task" [ 670.104324] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.115525] env[69328]: DEBUG oslo_vmware.api [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]523ccaac-cc20-c515-2f86-d77d813f8237, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.135562] env[69328]: DEBUG oslo_vmware.api [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272890, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.245992] env[69328]: DEBUG nova.network.neutron [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Successfully updated port: e8f19fa7-2ac8-47ea-bd97-b8bcdc8f3ed6 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 670.268523] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272888, 'name': CreateVM_Task} progress is 25%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.279211] env[69328]: DEBUG oslo_vmware.api [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272881, 'name': CloneVM_Task} progress is 95%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.454470] env[69328]: DEBUG nova.network.neutron [req-42dd9251-a527-405c-822b-0c8e14e7a0f6 req-dffe6145-a594-4401-8d32-0624d3282a1d service nova] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Updated VIF entry in instance network info cache for port eb83e86c-619b-4c67-a535-7ecc49d15ff2. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 670.454780] env[69328]: DEBUG nova.network.neutron [req-42dd9251-a527-405c-822b-0c8e14e7a0f6 req-dffe6145-a594-4401-8d32-0624d3282a1d service nova] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Updating instance_info_cache with network_info: [{"id": "eb83e86c-619b-4c67-a535-7ecc49d15ff2", "address": "fa:16:3e:14:ca:62", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.209", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb83e86c-61", "ovs_interfaceid": "eb83e86c-619b-4c67-a535-7ecc49d15ff2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 670.529223] env[69328]: DEBUG nova.compute.utils [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 670.532942] env[69328]: DEBUG nova.compute.manager [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 670.533127] env[69328]: DEBUG nova.network.neutron [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 671.218432] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "refresh_cache-bc9c3a41-7264-4d69-bc15-397b5fa0a8ad" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.218618] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquired lock "refresh_cache-bc9c3a41-7264-4d69-bc15-397b5fa0a8ad" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 671.218773] env[69328]: DEBUG nova.network.neutron [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 671.220279] env[69328]: DEBUG oslo_concurrency.lockutils [req-42dd9251-a527-405c-822b-0c8e14e7a0f6 req-dffe6145-a594-4401-8d32-0624d3282a1d service nova] Releasing lock "refresh_cache-84baf472-6eb5-4c92-98eb-e35c14bca4e2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 671.220526] env[69328]: DEBUG nova.compute.manager [req-42dd9251-a527-405c-822b-0c8e14e7a0f6 req-dffe6145-a594-4401-8d32-0624d3282a1d service nova] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Received event network-vif-deleted-9c9fec0b-e493-4950-9da7-bdb3214def9f {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 671.221013] env[69328]: DEBUG nova.compute.manager [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 671.266727] env[69328]: DEBUG oslo_vmware.api [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272881, 'name': CloneVM_Task} progress is 95%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.267058] env[69328]: DEBUG oslo_vmware.api [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]523ccaac-cc20-c515-2f86-d77d813f8237, 'name': SearchDatastore_Task, 'duration_secs': 0.011334} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.267304] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272888, 'name': CreateVM_Task} progress is 25%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.267630] env[69328]: DEBUG oslo_vmware.api [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272890, 'name': PowerOnVM_Task, 'duration_secs': 0.531269} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.271954] env[69328]: DEBUG oslo_concurrency.lockutils [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 671.271954] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 15a8de08-4d20-4329-9867-53e5dff82878/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318-rescue.vmdk. {{(pid=69328) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 671.272340] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 671.272452] env[69328]: INFO nova.compute.manager [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Took 10.48 seconds to spawn the instance on the hypervisor. 
[ 671.272743] env[69328]: DEBUG nova.compute.manager [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 671.274594] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d2332411-122f-4ce5-8279-36954dda6130 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.278305] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-279964e4-e96b-4dfa-9eac-598ee2b25257 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.285438] env[69328]: DEBUG nova.policy [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7a36279cfb184fb79ea8f5bd119213b2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '641bd900bcd6477f9ec6a026cf00e42f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 671.294283] env[69328]: DEBUG oslo_vmware.api [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for the task: (returnval){ [ 671.294283] env[69328]: value = "task-3272891" [ 671.294283] env[69328]: _type = "Task" [ 671.294283] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.310253] env[69328]: DEBUG oslo_vmware.api [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3272891, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.705940] env[69328]: DEBUG nova.network.neutron [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Successfully created port: f66bf51c-4ffe-4da8-a8d8-6b3db6ee56e0 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 671.745975] env[69328]: DEBUG oslo_vmware.api [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272881, 'name': CloneVM_Task} progress is 95%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.748700] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272888, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.767350] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a313461d-da55-4e8f-96cd-1cc7e6081d4d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.775255] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4559890-8171-4cd1-b893-6fae322f5a39 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.811217] env[69328]: DEBUG nova.network.neutron [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 671.819571] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fa6db16-43af-4346-b30c-ac121e6ea985 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.823380] env[69328]: DEBUG nova.compute.manager [req-ccd4b5a9-fdd0-4587-bba0-d545ee6578f8 req-2388953c-6ae2-4fde-ab05-cdf9d1e236c9 service nova] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Received event network-vif-plugged-e8f19fa7-2ac8-47ea-bd97-b8bcdc8f3ed6 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 671.823380] env[69328]: DEBUG oslo_concurrency.lockutils [req-ccd4b5a9-fdd0-4587-bba0-d545ee6578f8 req-2388953c-6ae2-4fde-ab05-cdf9d1e236c9 service nova] Acquiring lock "bc9c3a41-7264-4d69-bc15-397b5fa0a8ad-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 671.823631] env[69328]: DEBUG oslo_concurrency.lockutils [req-ccd4b5a9-fdd0-4587-bba0-d545ee6578f8 req-2388953c-6ae2-4fde-ab05-cdf9d1e236c9 service nova] Lock "bc9c3a41-7264-4d69-bc15-397b5fa0a8ad-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 671.823738] env[69328]: DEBUG oslo_concurrency.lockutils [req-ccd4b5a9-fdd0-4587-bba0-d545ee6578f8 req-2388953c-6ae2-4fde-ab05-cdf9d1e236c9 service nova] Lock "bc9c3a41-7264-4d69-bc15-397b5fa0a8ad-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 671.823902] env[69328]: DEBUG nova.compute.manager [req-ccd4b5a9-fdd0-4587-bba0-d545ee6578f8 req-2388953c-6ae2-4fde-ab05-cdf9d1e236c9 service nova] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] No waiting events found dispatching network-vif-plugged-e8f19fa7-2ac8-47ea-bd97-b8bcdc8f3ed6 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 671.824141] env[69328]: WARNING nova.compute.manager [req-ccd4b5a9-fdd0-4587-bba0-d545ee6578f8 req-2388953c-6ae2-4fde-ab05-cdf9d1e236c9 service nova] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Received unexpected event network-vif-plugged-e8f19fa7-2ac8-47ea-bd97-b8bcdc8f3ed6 for instance with vm_state 
building and task_state spawning. [ 671.824244] env[69328]: DEBUG nova.compute.manager [req-ccd4b5a9-fdd0-4587-bba0-d545ee6578f8 req-2388953c-6ae2-4fde-ab05-cdf9d1e236c9 service nova] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Received event network-changed-e8f19fa7-2ac8-47ea-bd97-b8bcdc8f3ed6 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 671.824368] env[69328]: DEBUG nova.compute.manager [req-ccd4b5a9-fdd0-4587-bba0-d545ee6578f8 req-2388953c-6ae2-4fde-ab05-cdf9d1e236c9 service nova] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Refreshing instance network info cache due to event network-changed-e8f19fa7-2ac8-47ea-bd97-b8bcdc8f3ed6. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 671.824525] env[69328]: DEBUG oslo_concurrency.lockutils [req-ccd4b5a9-fdd0-4587-bba0-d545ee6578f8 req-2388953c-6ae2-4fde-ab05-cdf9d1e236c9 service nova] Acquiring lock "refresh_cache-bc9c3a41-7264-4d69-bc15-397b5fa0a8ad" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.824974] env[69328]: INFO nova.compute.manager [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Took 48.78 seconds to build instance. [ 671.836795] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e35f8727-e09c-48a8-8d8c-afe6a6da7845 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.842862] env[69328]: DEBUG oslo_vmware.api [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3272891, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.471645} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.842862] env[69328]: INFO nova.virt.vmwareapi.ds_util [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 15a8de08-4d20-4329-9867-53e5dff82878/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318-rescue.vmdk. 
[ 671.843826] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2d59c90-9c11-4044-b18a-885e6f1861c3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.856185] env[69328]: DEBUG nova.compute.provider_tree [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 671.881974] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Reconfiguring VM instance instance-00000015 to attach disk [datastore2] 15a8de08-4d20-4329-9867-53e5dff82878/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318-rescue.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 671.885701] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-802fe7e5-6cd3-444d-9c07-2731056a78c3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.905009] env[69328]: DEBUG oslo_vmware.api [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for the task: (returnval){ [ 671.905009] env[69328]: value = "task-3272893" [ 671.905009] env[69328]: _type = "Task" [ 671.905009] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.914209] env[69328]: DEBUG oslo_vmware.api [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3272893, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.163699] env[69328]: DEBUG nova.network.neutron [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Updating instance_info_cache with network_info: [{"id": "e8f19fa7-2ac8-47ea-bd97-b8bcdc8f3ed6", "address": "fa:16:3e:64:7f:a9", "network": {"id": "2e8bdd1b-0cca-4f7c-bba3-ff1750073020", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2058593014-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "088bc9e3aeb449baa0a522342d57d183", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8f19fa7-2a", "ovs_interfaceid": "e8f19fa7-2ac8-47ea-bd97-b8bcdc8f3ed6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.238052] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272888, 'name': CreateVM_Task} progress is 25%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.241842] env[69328]: DEBUG oslo_vmware.api [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272881, 'name': CloneVM_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.254321] env[69328]: DEBUG nova.compute.manager [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 672.281773] env[69328]: DEBUG nova.virt.hardware [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 672.282090] env[69328]: DEBUG nova.virt.hardware [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 672.282192] env[69328]: DEBUG nova.virt.hardware [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 672.282365] env[69328]: DEBUG nova.virt.hardware [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 672.282506] env[69328]: DEBUG nova.virt.hardware [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 672.282647] env[69328]: DEBUG nova.virt.hardware [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 672.283374] env[69328]: DEBUG nova.virt.hardware [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 672.283374] env[69328]: DEBUG nova.virt.hardware [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 672.283374] 
env[69328]: DEBUG nova.virt.hardware [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 672.283374] env[69328]: DEBUG nova.virt.hardware [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 672.283542] env[69328]: DEBUG nova.virt.hardware [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 672.285041] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb80de7a-f842-4eac-88f6-8f4983d40e3f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.292383] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6614f4f6-2e35-470b-85db-e738b9ba0d98 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.328208] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fe6c8893-d845-4218-ab54-ac073d42bcda tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Lock "5b0e8bef-dcfc-4c5e-89d2-aa1748050d29" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.746s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 672.359813] env[69328]: DEBUG nova.scheduler.client.report [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 672.416249] env[69328]: DEBUG oslo_vmware.api [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3272893, 'name': ReconfigVM_Task, 'duration_secs': 0.370044} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.416249] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Reconfigured VM instance instance-00000015 to attach disk [datastore2] 15a8de08-4d20-4329-9867-53e5dff82878/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318-rescue.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 672.416583] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4400318-af28-4e50-88b7-d90234e62da8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.441188] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-10aa143b-c016-475e-b61b-14ce755d5f1f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.461955] env[69328]: DEBUG oslo_vmware.api [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for the task: (returnval){ [ 672.461955] env[69328]: value = "task-3272894" [ 672.461955] env[69328]: _type = "Task" [ 672.461955] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.472189] env[69328]: DEBUG oslo_vmware.api [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3272894, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.666461] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Releasing lock "refresh_cache-bc9c3a41-7264-4d69-bc15-397b5fa0a8ad" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 672.666863] env[69328]: DEBUG nova.compute.manager [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Instance network_info: |[{"id": "e8f19fa7-2ac8-47ea-bd97-b8bcdc8f3ed6", "address": "fa:16:3e:64:7f:a9", "network": {"id": "2e8bdd1b-0cca-4f7c-bba3-ff1750073020", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2058593014-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "088bc9e3aeb449baa0a522342d57d183", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8f19fa7-2a", "ovs_interfaceid": "e8f19fa7-2ac8-47ea-bd97-b8bcdc8f3ed6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 672.667218] env[69328]: DEBUG oslo_concurrency.lockutils [req-ccd4b5a9-fdd0-4587-bba0-d545ee6578f8 req-2388953c-6ae2-4fde-ab05-cdf9d1e236c9 service nova] Acquired lock "refresh_cache-bc9c3a41-7264-4d69-bc15-397b5fa0a8ad" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 672.667447] env[69328]: DEBUG nova.network.neutron [req-ccd4b5a9-fdd0-4587-bba0-d545ee6578f8 req-2388953c-6ae2-4fde-ab05-cdf9d1e236c9 service nova] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Refreshing network info cache for port e8f19fa7-2ac8-47ea-bd97-b8bcdc8f3ed6 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 672.668753] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:64:7f:a9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '43ad01d2-c7dd-453c-a929-8ad76294d13c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e8f19fa7-2ac8-47ea-bd97-b8bcdc8f3ed6', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 672.678020] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 
tempest-AttachVolumeNegativeTest-1234324410-project-member] Creating folder: Project (088bc9e3aeb449baa0a522342d57d183). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 672.678613] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1633087d-6b60-4f3e-9de7-0509ea08782d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.691907] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Created folder: Project (088bc9e3aeb449baa0a522342d57d183) in parent group-v653649. [ 672.692144] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Creating folder: Instances. Parent ref: group-v653724. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 672.692376] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9ad40db8-29b4-49f1-8ddb-0a5cad05140b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.703982] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Created folder: Instances in parent group-v653724. [ 672.704348] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 672.704688] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 672.704990] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8843bda5-218d-4cef-8589-20e630b68a77 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.729357] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 672.729357] env[69328]: value = "task-3272897" [ 672.729357] env[69328]: _type = "Task" [ 672.729357] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.740641] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272888, 'name': CreateVM_Task, 'duration_secs': 3.042826} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.741054] env[69328]: DEBUG oslo_vmware.api [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272881, 'name': CloneVM_Task, 'duration_secs': 4.076743} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.741666] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 672.742413] env[69328]: INFO nova.virt.vmwareapi.vmops [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Created linked-clone VM from snapshot [ 672.742741] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.742951] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 672.743331] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 672.744198] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fef5632-f161-4107-8f6b-2be507c46863 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.749979] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8d8e095-2c1c-47d3-9852-9f42a0dd7d13 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.753627] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272897, 'name': CreateVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.759265] env[69328]: DEBUG oslo_vmware.api [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Waiting for the task: (returnval){ [ 672.759265] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]528a6532-a94d-4b3b-9129-5e0de9c4871e" [ 672.759265] env[69328]: _type = "Task" [ 672.759265] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.762655] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Uploading image e30468af-18ba-405b-9493-86d69d242d95 {{(pid=69328) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 672.774136] env[69328]: DEBUG oslo_vmware.api [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]528a6532-a94d-4b3b-9129-5e0de9c4871e, 'name': SearchDatastore_Task, 'duration_secs': 0.012223} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.775308] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 672.775308] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 672.775308] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.775308] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 672.775967] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 672.775967] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e9a8f14d-0f8a-4dd4-807a-52ecf7c79f2b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.784704] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 
tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 672.784907] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 672.785774] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1a8fe1d-39f4-4939-91da-9e258e94fb5c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.790619] env[69328]: DEBUG oslo_vmware.rw_handles [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 672.790619] env[69328]: value = "vm-653720" [ 672.790619] env[69328]: _type = "VirtualMachine" [ 672.790619] env[69328]: }. {{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 672.791144] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-ee39ef03-60ae-4b28-adab-47b7a76044c7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.796573] env[69328]: DEBUG oslo_vmware.api [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Waiting for the task: (returnval){ [ 672.796573] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b73cba-549c-bf99-8048-9a58c409c2b1" [ 672.796573] env[69328]: _type = "Task" [ 672.796573] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.802085] env[69328]: DEBUG oslo_vmware.rw_handles [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Lease: (returnval){ [ 672.802085] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5250281b-f609-8770-69c1-c9bdc8fbfb7d" [ 672.802085] env[69328]: _type = "HttpNfcLease" [ 672.802085] env[69328]: } obtained for exporting VM: (result){ [ 672.802085] env[69328]: value = "vm-653720" [ 672.802085] env[69328]: _type = "VirtualMachine" [ 672.802085] env[69328]: }. {{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 672.802523] env[69328]: DEBUG oslo_vmware.api [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Waiting for the lease: (returnval){ [ 672.802523] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5250281b-f609-8770-69c1-c9bdc8fbfb7d" [ 672.802523] env[69328]: _type = "HttpNfcLease" [ 672.802523] env[69328]: } to be ready. 
{{(pid=69328) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 672.810255] env[69328]: DEBUG oslo_vmware.api [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b73cba-549c-bf99-8048-9a58c409c2b1, 'name': SearchDatastore_Task, 'duration_secs': 0.009748} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.811582] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13872d50-5810-4193-9340-efec5869a060 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.816042] env[69328]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 672.816042] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5250281b-f609-8770-69c1-c9bdc8fbfb7d" [ 672.816042] env[69328]: _type = "HttpNfcLease" [ 672.816042] env[69328]: } is ready. {{(pid=69328) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 672.816794] env[69328]: DEBUG oslo_vmware.rw_handles [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 672.816794] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5250281b-f609-8770-69c1-c9bdc8fbfb7d" [ 672.816794] env[69328]: _type = "HttpNfcLease" [ 672.816794] env[69328]: }. {{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 672.817839] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d4f384c-8a41-498a-a4d8-24580a89a0b7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.822135] env[69328]: DEBUG oslo_vmware.api [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Waiting for the task: (returnval){ [ 672.822135] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5268da84-ea76-eec0-15a0-1c7637a4ae20" [ 672.822135] env[69328]: _type = "Task" [ 672.822135] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.832347] env[69328]: DEBUG nova.compute.manager [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 672.836663] env[69328]: DEBUG oslo_vmware.rw_handles [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528cbe37-e35e-7142-4b87-7855ec059e00/disk-0.vmdk from lease info. 
{{(pid=69328) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 672.837232] env[69328]: DEBUG oslo_vmware.rw_handles [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528cbe37-e35e-7142-4b87-7855ec059e00/disk-0.vmdk for reading. {{(pid=69328) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 672.908652] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.886s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 672.909238] env[69328]: DEBUG nova.compute.manager [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 672.912172] env[69328]: DEBUG oslo_vmware.api [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5268da84-ea76-eec0-15a0-1c7637a4ae20, 'name': SearchDatastore_Task, 'duration_secs': 0.011122} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.917020] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f41c64e8-47aa-4396-9140-5604f5e9a675 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.657s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 672.917020] env[69328]: DEBUG nova.objects.instance [None req-f41c64e8-47aa-4396-9140-5604f5e9a675 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Lazy-loading 'resources' on Instance uuid edb1a21a-6907-4198-a977-c1213e8fecc0 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 672.917020] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 672.917020] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 84baf472-6eb5-4c92-98eb-e35c14bca4e2/84baf472-6eb5-4c92-98eb-e35c14bca4e2.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 672.917358] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2118c4db-8eef-4721-8fdd-892493455a57 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.925374] env[69328]: DEBUG oslo_vmware.api [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Waiting for the task: (returnval){ [ 672.925374] env[69328]: value = "task-3272899" [ 672.925374] env[69328]: _type = "Task" [ 672.925374] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.934530] env[69328]: DEBUG oslo_vmware.api [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Task: {'id': task-3272899, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.952942] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-cb34ee47-64c3-4e8f-a10d-a5362a6c7ed9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.973936] env[69328]: DEBUG oslo_vmware.api [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3272894, 'name': ReconfigVM_Task, 'duration_secs': 0.361127} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.974972] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 672.975281] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b64f7d11-f002-4137-a92b-5c434bec0cd8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.984832] env[69328]: DEBUG oslo_vmware.api [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for the task: (returnval){ [ 672.984832] env[69328]: value = "task-3272900" [ 672.984832] env[69328]: _type = "Task" [ 672.984832] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.997488] env[69328]: DEBUG oslo_vmware.api [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3272900, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.246953] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272897, 'name': CreateVM_Task, 'duration_secs': 0.328575} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.246953] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 673.247734] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.247911] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 673.248602] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 673.248751] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58e6247e-8fb2-4f57-af90-858b084135a1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.258634] env[69328]: DEBUG oslo_vmware.api [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 673.258634] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52002c29-04fc-9ed4-a883-8e799819f0ab" [ 673.258634] env[69328]: _type = "Task" [ 673.258634] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.267694] env[69328]: DEBUG oslo_vmware.api [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52002c29-04fc-9ed4-a883-8e799819f0ab, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.365845] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 673.421546] env[69328]: DEBUG nova.compute.utils [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 673.425050] env[69328]: DEBUG nova.compute.manager [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 673.425050] env[69328]: DEBUG nova.network.neutron [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 673.446605] env[69328]: DEBUG oslo_vmware.api [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Task: {'id': task-3272899, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.499454] env[69328]: DEBUG oslo_vmware.api [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3272900, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.505713] env[69328]: DEBUG nova.policy [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b1885f39de52408ba9c2846c5292edda', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0ea10f2d436a4d758f514b2457ca195f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 673.564934] env[69328]: DEBUG nova.network.neutron [req-ccd4b5a9-fdd0-4587-bba0-d545ee6578f8 req-2388953c-6ae2-4fde-ab05-cdf9d1e236c9 service nova] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Updated VIF entry in instance network info cache for port e8f19fa7-2ac8-47ea-bd97-b8bcdc8f3ed6. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 673.566535] env[69328]: DEBUG nova.network.neutron [req-ccd4b5a9-fdd0-4587-bba0-d545ee6578f8 req-2388953c-6ae2-4fde-ab05-cdf9d1e236c9 service nova] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Updating instance_info_cache with network_info: [{"id": "e8f19fa7-2ac8-47ea-bd97-b8bcdc8f3ed6", "address": "fa:16:3e:64:7f:a9", "network": {"id": "2e8bdd1b-0cca-4f7c-bba3-ff1750073020", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2058593014-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "088bc9e3aeb449baa0a522342d57d183", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8f19fa7-2a", "ovs_interfaceid": "e8f19fa7-2ac8-47ea-bd97-b8bcdc8f3ed6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.736805] env[69328]: DEBUG nova.network.neutron [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Successfully updated port: f66bf51c-4ffe-4da8-a8d8-6b3db6ee56e0 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 673.778571] env[69328]: DEBUG oslo_vmware.api [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52002c29-04fc-9ed4-a883-8e799819f0ab, 'name': SearchDatastore_Task, 'duration_secs': 0.021492} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.778992] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 673.779445] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 673.779445] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.779744] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 673.779840] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 673.780252] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0f13d26e-7a80-45c1-bf9c-6c7864646617 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.793060] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 673.793060] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 673.794116] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e46b80ba-a6ec-4ada-b51a-c8cadf2b08c9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.805688] env[69328]: DEBUG oslo_vmware.api [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 673.805688] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b8f336-3d3d-e275-96b1-b2906d181b7f" [ 673.805688] env[69328]: _type = "Task" [ 673.805688] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.817538] env[69328]: DEBUG oslo_vmware.api [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b8f336-3d3d-e275-96b1-b2906d181b7f, 'name': SearchDatastore_Task, 'duration_secs': 0.009355} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.821351] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa158040-3301-40e6-b8d0-270388b18be3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.829179] env[69328]: DEBUG oslo_vmware.api [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 673.829179] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5216267a-6a19-1466-3b89-8780db4bf5e0" [ 673.829179] env[69328]: _type = "Task" [ 673.829179] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.835650] env[69328]: DEBUG oslo_vmware.api [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5216267a-6a19-1466-3b89-8780db4bf5e0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.930998] env[69328]: DEBUG nova.compute.manager [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 673.946772] env[69328]: DEBUG oslo_vmware.api [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Task: {'id': task-3272899, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.544945} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.949945] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 84baf472-6eb5-4c92-98eb-e35c14bca4e2/84baf472-6eb5-4c92-98eb-e35c14bca4e2.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 673.950332] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 673.950887] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-050b0c4c-cd9c-4cc9-abbf-5ffca6aabfd2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.958041] env[69328]: DEBUG oslo_vmware.api [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Waiting for the task: (returnval){ [ 673.958041] env[69328]: value = "task-3272902" [ 673.958041] env[69328]: _type = "Task" [ 673.958041] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.970847] env[69328]: DEBUG oslo_vmware.api [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Task: {'id': task-3272902, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.997675] env[69328]: DEBUG oslo_vmware.api [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3272900, 'name': PowerOnVM_Task, 'duration_secs': 0.520256} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.999395] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 674.002801] env[69328]: DEBUG nova.compute.manager [req-b16c224c-2ef3-4d5b-8b06-9f16e29dae3a req-063d6f06-6735-4d2d-84cb-6eb24723697b service nova] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Received event network-vif-plugged-f66bf51c-4ffe-4da8-a8d8-6b3db6ee56e0 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 674.003633] env[69328]: DEBUG oslo_concurrency.lockutils [req-b16c224c-2ef3-4d5b-8b06-9f16e29dae3a req-063d6f06-6735-4d2d-84cb-6eb24723697b service nova] Acquiring lock "bbbfb48d-b474-4a6e-9078-336f23d2c343-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 674.004403] env[69328]: DEBUG oslo_concurrency.lockutils [req-b16c224c-2ef3-4d5b-8b06-9f16e29dae3a req-063d6f06-6735-4d2d-84cb-6eb24723697b service nova] Lock "bbbfb48d-b474-4a6e-9078-336f23d2c343-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 674.004403] env[69328]: DEBUG oslo_concurrency.lockutils [req-b16c224c-2ef3-4d5b-8b06-9f16e29dae3a req-063d6f06-6735-4d2d-84cb-6eb24723697b service nova] Lock "bbbfb48d-b474-4a6e-9078-336f23d2c343-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 674.004403] env[69328]: DEBUG nova.compute.manager [req-b16c224c-2ef3-4d5b-8b06-9f16e29dae3a req-063d6f06-6735-4d2d-84cb-6eb24723697b service nova] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] No waiting events found dispatching network-vif-plugged-f66bf51c-4ffe-4da8-a8d8-6b3db6ee56e0 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 674.005647] env[69328]: WARNING nova.compute.manager [req-b16c224c-2ef3-4d5b-8b06-9f16e29dae3a req-063d6f06-6735-4d2d-84cb-6eb24723697b service nova] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Received unexpected event network-vif-plugged-f66bf51c-4ffe-4da8-a8d8-6b3db6ee56e0 for instance with vm_state building and task_state spawning. 
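The req-b16c224c-... entries just above show Nova's external instance-event handling from the outside: Neutron reports network-vif-plugged-f66bf51c-... while the instance is still building, the compute manager takes the per-instance "...-events" lock, pop_instance_event finds no registered waiter ("No waiting events found dispatching ..."), and the event is downgraded to the WARNING about an unexpected event. As a minimal sketch only (the class and method names below are hypothetical and this is not Nova's actual InstanceEvents implementation), the pop-or-warn pattern behind those lines can be pictured like this:

    import threading
    from concurrent.futures import Future

    class InstanceEventRegistry:
        """Illustrative sketch (not Nova's code): waiters register a Future per
        (instance, event); an incoming external event either completes a waiter
        or is reported as unexpected, matching the log entries above."""

        def __init__(self):
            # Stands in for the per-instance "<uuid>-events" lock seen in the log.
            self._lock = threading.Lock()
            self._waiters = {}  # {(instance_uuid, event_name): Future}

        def prepare_for_event(self, instance_uuid, event_name):
            fut = Future()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = fut
            return fut  # the spawn path would block on fut.result(timeout=...)

        def pop_instance_event(self, instance_uuid, event_name):
            with self._lock:
                return self._waiters.pop((instance_uuid, event_name), None)

        def external_event(self, instance_uuid, event_name):
            fut = self.pop_instance_event(instance_uuid, event_name)
            if fut is None:
                # Mirrors "No waiting events found dispatching network-vif-plugged-..."
                # followed by the WARNING about an unexpected event.
                print("WARNING: unexpected event %s for instance %s"
                      % (event_name, instance_uuid))
            else:
                fut.set_result(event_name)

    registry = InstanceEventRegistry()
    registry.external_event("bbbfb48d-b474-4a6e-9078-336f23d2c343",
                            "network-vif-plugged-f66bf51c-4ffe-4da8-a8d8-6b3db6ee56e0")

In this run the event arrived before the spawn path had registered a waiter for port f66bf51c-..., so it lands in the warning branch; that is harmless while the instance is still in vm_state building / task_state spawning.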
[ 674.009798] env[69328]: DEBUG nova.compute.manager [None req-01e2ac89-d32a-4ec8-b52a-462827e352bc tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 674.011369] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33ee7241-dd8a-4d65-b8d2-24500917df97 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.051539] env[69328]: DEBUG nova.network.neutron [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Successfully created port: 5deae7a2-4461-4670-a9f6-fda626bae672 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 674.072289] env[69328]: DEBUG oslo_concurrency.lockutils [req-ccd4b5a9-fdd0-4587-bba0-d545ee6578f8 req-2388953c-6ae2-4fde-ab05-cdf9d1e236c9 service nova] Releasing lock "refresh_cache-bc9c3a41-7264-4d69-bc15-397b5fa0a8ad" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 674.073360] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c82a8e5-7ec7-4e60-8c7b-384f8c8cc33e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.082894] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67eced2a-98a4-4cd3-a157-57193543b87d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.119070] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c8fa6db-e32f-49da-ab5b-8d64df0db0f5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.127667] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9531375c-e054-48c7-af1a-1078d16130c7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.146410] env[69328]: DEBUG nova.compute.provider_tree [None req-f41c64e8-47aa-4396-9140-5604f5e9a675 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 674.240640] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Acquiring lock "refresh_cache-bbbfb48d-b474-4a6e-9078-336f23d2c343" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 674.240800] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Acquired lock "refresh_cache-bbbfb48d-b474-4a6e-9078-336f23d2c343" {{(pid=69328) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 674.240957] env[69328]: DEBUG nova.network.neutron [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 674.349240] env[69328]: DEBUG oslo_vmware.api [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5216267a-6a19-1466-3b89-8780db4bf5e0, 'name': SearchDatastore_Task, 'duration_secs': 0.009248} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.349240] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 674.349240] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] bc9c3a41-7264-4d69-bc15-397b5fa0a8ad/bc9c3a41-7264-4d69-bc15-397b5fa0a8ad.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 674.349240] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4f64a7fe-102a-4918-a170-624dead18648 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.358675] env[69328]: DEBUG oslo_vmware.api [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 674.358675] env[69328]: value = "task-3272903" [ 674.358675] env[69328]: _type = "Task" [ 674.358675] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.375535] env[69328]: DEBUG oslo_vmware.api [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3272903, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.468874] env[69328]: DEBUG oslo_vmware.api [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Task: {'id': task-3272902, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072596} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.469885] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 674.470870] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4769821d-43e5-4a0a-864c-8ce9b42cc45c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.500510] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Reconfiguring VM instance instance-00000017 to attach disk [datastore1] 84baf472-6eb5-4c92-98eb-e35c14bca4e2/84baf472-6eb5-4c92-98eb-e35c14bca4e2.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 674.502339] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b0bd9464-9d3a-42f7-aeb5-9c87fae718d3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.529616] env[69328]: DEBUG oslo_vmware.api [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Waiting for the task: (returnval){ [ 674.529616] env[69328]: value = "task-3272904" [ 674.529616] env[69328]: _type = "Task" [ 674.529616] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.544119] env[69328]: DEBUG oslo_vmware.api [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Task: {'id': task-3272904, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.651914] env[69328]: DEBUG nova.scheduler.client.report [None req-f41c64e8-47aa-4396-9140-5604f5e9a675 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 674.789187] env[69328]: DEBUG nova.network.neutron [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 674.870581] env[69328]: DEBUG oslo_vmware.api [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3272903, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.949791] env[69328]: DEBUG nova.compute.manager [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 674.962841] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Acquiring lock "5292b759-9d1f-486a-b4d6-90519b3ae986" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 674.963230] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Lock "5292b759-9d1f-486a-b4d6-90519b3ae986" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 674.981187] env[69328]: DEBUG nova.virt.hardware [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 674.981552] env[69328]: DEBUG nova.virt.hardware [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 674.981725] env[69328]: DEBUG nova.virt.hardware [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:356}} [ 674.981916] env[69328]: DEBUG nova.virt.hardware [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 674.982102] env[69328]: DEBUG nova.virt.hardware [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 674.982271] env[69328]: DEBUG nova.virt.hardware [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 674.982494] env[69328]: DEBUG nova.virt.hardware [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 674.982798] env[69328]: DEBUG nova.virt.hardware [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 674.982854] env[69328]: DEBUG nova.virt.hardware [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 674.982984] env[69328]: DEBUG nova.virt.hardware [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 674.983196] env[69328]: DEBUG nova.virt.hardware [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 674.984495] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f947d82c-8e3e-41fe-b6d6-069ca51725a2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.996936] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Acquiring lock "afa25f89-ccda-4b77-aaa1-a3b62b53d870" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 674.997196] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Lock "afa25f89-ccda-4b77-aaa1-a3b62b53d870" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 675.001169] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-311cef8b-b094-4990-bd58-360bc8806a13 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.021494] env[69328]: DEBUG nova.network.neutron [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Updating instance_info_cache with network_info: [{"id": "f66bf51c-4ffe-4da8-a8d8-6b3db6ee56e0", "address": "fa:16:3e:eb:6a:37", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.166", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf66bf51c-4f", "ovs_interfaceid": "f66bf51c-4ffe-4da8-a8d8-6b3db6ee56e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.024423] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Acquiring lock "690096cf-a0bd-4db1-ad97-8d8a37ad7c84" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 675.024423] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Lock "690096cf-a0bd-4db1-ad97-8d8a37ad7c84" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 675.043530] env[69328]: DEBUG oslo_vmware.api [None 
req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Task: {'id': task-3272904, 'name': ReconfigVM_Task, 'duration_secs': 0.45969} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.043893] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Reconfigured VM instance instance-00000017 to attach disk [datastore1] 84baf472-6eb5-4c92-98eb-e35c14bca4e2/84baf472-6eb5-4c92-98eb-e35c14bca4e2.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 675.044581] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ef8673db-4a08-4873-90cf-561555ce9e8e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.052523] env[69328]: DEBUG oslo_vmware.api [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Waiting for the task: (returnval){ [ 675.052523] env[69328]: value = "task-3272905" [ 675.052523] env[69328]: _type = "Task" [ 675.052523] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.066184] env[69328]: DEBUG oslo_vmware.api [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Task: {'id': task-3272905, 'name': Rename_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.160275] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f41c64e8-47aa-4396-9140-5604f5e9a675 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.246s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 675.163159] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 31.107s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 675.163159] env[69328]: DEBUG nova.objects.instance [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69328) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 675.186383] env[69328]: INFO nova.scheduler.client.report [None req-f41c64e8-47aa-4396-9140-5604f5e9a675 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Deleted allocations for instance edb1a21a-6907-4198-a977-c1213e8fecc0 [ 675.369476] env[69328]: DEBUG oslo_vmware.api [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3272903, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.524995} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.369790] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] bc9c3a41-7264-4d69-bc15-397b5fa0a8ad/bc9c3a41-7264-4d69-bc15-397b5fa0a8ad.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 675.370031] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 675.370302] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7eafbcf9-c3ed-45c8-a908-791ad9bbcf79 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.381293] env[69328]: DEBUG oslo_vmware.api [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 675.381293] env[69328]: value = "task-3272906" [ 675.381293] env[69328]: _type = "Task" [ 675.381293] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.390020] env[69328]: DEBUG oslo_vmware.api [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3272906, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.524407] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Releasing lock "refresh_cache-bbbfb48d-b474-4a6e-9078-336f23d2c343" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 675.524840] env[69328]: DEBUG nova.compute.manager [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Instance network_info: |[{"id": "f66bf51c-4ffe-4da8-a8d8-6b3db6ee56e0", "address": "fa:16:3e:eb:6a:37", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.166", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf66bf51c-4f", "ovs_interfaceid": "f66bf51c-4ffe-4da8-a8d8-6b3db6ee56e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 675.525314] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:eb:6a:37', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a8b99a46-3e7f-4ef1-9e45-58e6cd17f210', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f66bf51c-4ffe-4da8-a8d8-6b3db6ee56e0', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 675.535538] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 675.535787] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 675.535998] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e72ea9c5-f0bf-4720-a31c-773b07619f0f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.566390] env[69328]: DEBUG oslo_vmware.api [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Task: {'id': task-3272905, 'name': Rename_Task, 'duration_secs': 0.235522} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.568661] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 675.568918] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 675.568918] env[69328]: value = "task-3272907" [ 675.568918] env[69328]: _type = "Task" [ 675.568918] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.569118] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ddf28a6c-010a-4ab4-8b38-dcbab4c3b28c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.580967] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272907, 'name': CreateVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.582570] env[69328]: DEBUG oslo_vmware.api [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Waiting for the task: (returnval){ [ 675.582570] env[69328]: value = "task-3272908" [ 675.582570] env[69328]: _type = "Task" [ 675.582570] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.592828] env[69328]: DEBUG oslo_vmware.api [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Task: {'id': task-3272908, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.700313] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f41c64e8-47aa-4396-9140-5604f5e9a675 tempest-ServerAddressesTestJSON-803335342 tempest-ServerAddressesTestJSON-803335342-project-member] Lock "edb1a21a-6907-4198-a977-c1213e8fecc0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.026s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 675.891697] env[69328]: DEBUG oslo_vmware.api [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3272906, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.099415} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.891994] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 675.893477] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd3ba40-3d03-44d1-ac2c-680f5172a5a0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.917451] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Reconfiguring VM instance instance-00000018 to attach disk [datastore2] bc9c3a41-7264-4d69-bc15-397b5fa0a8ad/bc9c3a41-7264-4d69-bc15-397b5fa0a8ad.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 675.917815] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4c944417-1025-40d6-af4c-e82ca8378f3e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.939279] env[69328]: DEBUG oslo_vmware.api [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 675.939279] env[69328]: value = "task-3272910" [ 675.939279] env[69328]: _type = "Task" [ 675.939279] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.947967] env[69328]: DEBUG oslo_vmware.api [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3272910, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.082387] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272907, 'name': CreateVM_Task, 'duration_secs': 0.356392} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.082581] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 676.083336] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 676.083514] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 676.083907] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 676.084313] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ccc35ac4-803e-48eb-8246-b0dc89c0510a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.094922] env[69328]: DEBUG oslo_vmware.api [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Waiting for the task: (returnval){ [ 676.094922] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d92348-0ac9-60e3-d2ec-8b22b77d6f8b" [ 676.094922] env[69328]: _type = "Task" [ 676.094922] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.100017] env[69328]: DEBUG oslo_vmware.api [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Task: {'id': task-3272908, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.108380] env[69328]: DEBUG oslo_vmware.api [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d92348-0ac9-60e3-d2ec-8b22b77d6f8b, 'name': SearchDatastore_Task, 'duration_secs': 0.010617} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.108702] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 676.108946] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 676.109202] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 676.110250] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 676.110250] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 676.110250] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-20a0175a-a3f1-40d0-9b9e-925ded7d0e6f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.121018] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 676.121018] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 676.121018] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34178e67-dc11-46cb-9a81-d2fc435100a0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.127892] env[69328]: DEBUG oslo_vmware.api [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Waiting for the task: (returnval){ [ 676.127892] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d80077-00dd-59b9-0936-7d03779e849a" [ 676.127892] env[69328]: _type = "Task" [ 676.127892] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.142084] env[69328]: DEBUG oslo_vmware.api [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d80077-00dd-59b9-0936-7d03779e849a, 'name': SearchDatastore_Task, 'duration_secs': 0.009016} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.144206] env[69328]: DEBUG nova.network.neutron [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Successfully updated port: 5deae7a2-4461-4670-a9f6-fda626bae672 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 676.146794] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4623edf6-c654-47c4-9694-d8a62b7b9ac1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.154923] env[69328]: DEBUG oslo_vmware.api [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Waiting for the task: (returnval){ [ 676.154923] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f9b6ea-a065-188e-1963-620159331954" [ 676.154923] env[69328]: _type = "Task" [ 676.154923] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.168479] env[69328]: DEBUG oslo_vmware.api [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f9b6ea-a065-188e-1963-620159331954, 'name': SearchDatastore_Task, 'duration_secs': 0.010913} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.168648] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 676.168884] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] bbbfb48d-b474-4a6e-9078-336f23d2c343/bbbfb48d-b474-4a6e-9078-336f23d2c343.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 676.169183] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4ff7427a-de8f-40bb-97d0-d47f9dc7334a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.182478] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d9d4b398-de00-47fa-9313-0769afb531b9 tempest-ServersAdmin275Test-1865321521 tempest-ServersAdmin275Test-1865321521-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.020s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 676.184151] env[69328]: DEBUG oslo_vmware.api [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Waiting for the task: (returnval){ [ 676.184151] env[69328]: value = "task-3272911" [ 676.184151] env[69328]: _type = "Task" [ 676.184151] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.184151] env[69328]: DEBUG oslo_concurrency.lockutils [None req-da766223-5256-485d-87d7-8ec2ca958509 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.871s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 676.184151] env[69328]: DEBUG nova.objects.instance [None req-da766223-5256-485d-87d7-8ec2ca958509 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Lazy-loading 'resources' on Instance uuid 7b348a95-3ab2-4112-87e3-b17504c0a302 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 676.197650] env[69328]: DEBUG oslo_vmware.api [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272911, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.289736] env[69328]: DEBUG nova.compute.manager [req-b1fa59be-0b98-4b71-b3e9-32974920e776 req-47781baa-be26-4bef-aad2-779ad11ff622 service nova] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Received event network-changed-f66bf51c-4ffe-4da8-a8d8-6b3db6ee56e0 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 676.289971] env[69328]: DEBUG nova.compute.manager [req-b1fa59be-0b98-4b71-b3e9-32974920e776 req-47781baa-be26-4bef-aad2-779ad11ff622 service nova] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Refreshing instance network info cache due to event network-changed-f66bf51c-4ffe-4da8-a8d8-6b3db6ee56e0. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 676.290246] env[69328]: DEBUG oslo_concurrency.lockutils [req-b1fa59be-0b98-4b71-b3e9-32974920e776 req-47781baa-be26-4bef-aad2-779ad11ff622 service nova] Acquiring lock "refresh_cache-bbbfb48d-b474-4a6e-9078-336f23d2c343" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 676.290516] env[69328]: DEBUG oslo_concurrency.lockutils [req-b1fa59be-0b98-4b71-b3e9-32974920e776 req-47781baa-be26-4bef-aad2-779ad11ff622 service nova] Acquired lock "refresh_cache-bbbfb48d-b474-4a6e-9078-336f23d2c343" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 676.290715] env[69328]: DEBUG nova.network.neutron [req-b1fa59be-0b98-4b71-b3e9-32974920e776 req-47781baa-be26-4bef-aad2-779ad11ff622 service nova] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Refreshing network info cache for port f66bf51c-4ffe-4da8-a8d8-6b3db6ee56e0 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 676.320205] env[69328]: DEBUG oslo_concurrency.lockutils [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Acquiring lock "99e31dfd-5d41-4564-886f-becc25ca289c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 676.320364] env[69328]: DEBUG oslo_concurrency.lockutils [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Lock "99e31dfd-5d41-4564-886f-becc25ca289c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 676.453517] env[69328]: DEBUG oslo_vmware.api [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3272910, 'name': ReconfigVM_Task, 'duration_secs': 0.370542} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.453906] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Reconfigured VM instance instance-00000018 to attach disk [datastore2] bc9c3a41-7264-4d69-bc15-397b5fa0a8ad/bc9c3a41-7264-4d69-bc15-397b5fa0a8ad.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 676.454831] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-de9eaad1-cfab-4017-a040-360809e05c5c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.463353] env[69328]: DEBUG oslo_vmware.api [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 676.463353] env[69328]: value = "task-3272912" [ 676.463353] env[69328]: _type = "Task" [ 676.463353] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.474062] env[69328]: DEBUG oslo_vmware.api [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3272912, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.596819] env[69328]: DEBUG oslo_vmware.api [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Task: {'id': task-3272908, 'name': PowerOnVM_Task, 'duration_secs': 0.664023} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.596819] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 676.596819] env[69328]: INFO nova.compute.manager [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Took 9.90 seconds to spawn the instance on the hypervisor. 
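Annotation (editorial, not part of the log): the entries above follow oslo.vmware's task pattern — each vCenter call (CreateVM_Task, PowerOnVM_Task, Rename_Task, ReconfigVM_Task, ...) returns a task handle, and wait_for_task then polls its progress until it reports success, which is what the repeated "progress is N%" / "completed successfully" lines record. A minimal, self-contained sketch of that poll-until-done loop; invoke_task and poll_task are hypothetical stand-ins for the SOAP client, not the real oslo.vmware API.

```python
import time

# Hypothetical stand-ins for the vSphere calls shown in the log
# (CreateVM_Task, PowerOnVM_Task, ...); not the real oslo.vmware client.
def invoke_task(name):
    """Start a fake vCenter task and return its handle."""
    return {"id": "task-0000001", "name": name, "progress": 0}

def poll_task(task):
    """Fetch fake task state; the real code reads TaskInfo via a PropertyCollector."""
    task["progress"] = min(task["progress"] + 33, 100)
    state = "success" if task["progress"] >= 100 else "running"
    return task["progress"], state

def wait_for_task(task, interval=0.1):
    """Poll the task until it finishes, mirroring the 'progress is N%' entries."""
    while True:
        progress, state = poll_task(task)
        print(f"Task {task['id']} ({task['name']}) progress is {progress}%.")
        if state == "success":
            print(f"Task {task['id']} completed successfully.")
            return task
        time.sleep(interval)

wait_for_task(invoke_task("PowerOnVM_Task"))
```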
[ 676.597055] env[69328]: DEBUG nova.compute.manager [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 676.597987] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a0faa3a-4d0c-459e-9452-93001dacc21e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.646102] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Acquiring lock "refresh_cache-c3673531-9167-4d33-b8ce-d6afa5e589bc" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 676.646337] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Acquired lock "refresh_cache-c3673531-9167-4d33-b8ce-d6afa5e589bc" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 676.646445] env[69328]: DEBUG nova.network.neutron [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 676.699614] env[69328]: DEBUG oslo_vmware.api [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272911, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.481874} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.699985] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] bbbfb48d-b474-4a6e-9078-336f23d2c343/bbbfb48d-b474-4a6e-9078-336f23d2c343.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 676.700145] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 676.700454] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f1a6a568-fc29-43a0-9072-2f0e97633134 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.709318] env[69328]: DEBUG oslo_vmware.api [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Waiting for the task: (returnval){ [ 676.709318] env[69328]: value = "task-3272913" [ 676.709318] env[69328]: _type = "Task" [ 676.709318] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.719716] env[69328]: DEBUG oslo_vmware.api [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272913, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.975554] env[69328]: DEBUG oslo_vmware.api [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3272912, 'name': Rename_Task, 'duration_secs': 0.295089} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.975828] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 676.976091] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1739d81a-2ab3-42d2-938b-2ac130fc8663 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.983081] env[69328]: DEBUG oslo_vmware.api [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 676.983081] env[69328]: value = "task-3272914" [ 676.983081] env[69328]: _type = "Task" [ 676.983081] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.995328] env[69328]: DEBUG oslo_vmware.api [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3272914, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.109258] env[69328]: DEBUG nova.network.neutron [req-b1fa59be-0b98-4b71-b3e9-32974920e776 req-47781baa-be26-4bef-aad2-779ad11ff622 service nova] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Updated VIF entry in instance network info cache for port f66bf51c-4ffe-4da8-a8d8-6b3db6ee56e0. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 677.110480] env[69328]: DEBUG nova.network.neutron [req-b1fa59be-0b98-4b71-b3e9-32974920e776 req-47781baa-be26-4bef-aad2-779ad11ff622 service nova] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Updating instance_info_cache with network_info: [{"id": "f66bf51c-4ffe-4da8-a8d8-6b3db6ee56e0", "address": "fa:16:3e:eb:6a:37", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.166", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf66bf51c-4f", "ovs_interfaceid": "f66bf51c-4ffe-4da8-a8d8-6b3db6ee56e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 677.121505] env[69328]: INFO nova.compute.manager [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Took 41.93 seconds to build instance. [ 677.200604] env[69328]: DEBUG nova.network.neutron [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 677.219123] env[69328]: DEBUG oslo_vmware.api [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272913, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08527} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.224444] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 677.227378] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bebecd3c-b3c7-4c76-a546-2b08e63cb8da {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.259524] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Reconfiguring VM instance instance-00000019 to attach disk [datastore2] bbbfb48d-b474-4a6e-9078-336f23d2c343/bbbfb48d-b474-4a6e-9078-336f23d2c343.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 677.262689] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-16e05c89-6839-4019-8a79-9f8ce02d9781 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.284891] env[69328]: DEBUG oslo_vmware.api [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Waiting for the task: (returnval){ [ 677.284891] env[69328]: value = "task-3272915" [ 677.284891] env[69328]: _type = "Task" [ 677.284891] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.296793] env[69328]: DEBUG oslo_vmware.api [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272915, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.328564] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3893659-5325-427a-b2b7-a945944b95e9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.336643] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5820a203-f44b-4299-8124-283b07af7b97 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.372223] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3206962-741a-4d27-be79-86487d1277b7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.381244] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-635b947d-25c1-4fd9-8ce1-1ca6403e7960 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.398277] env[69328]: DEBUG nova.compute.provider_tree [None req-da766223-5256-485d-87d7-8ec2ca958509 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 677.428546] env[69328]: DEBUG nova.network.neutron [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Updating instance_info_cache with network_info: [{"id": "5deae7a2-4461-4670-a9f6-fda626bae672", "address": "fa:16:3e:a8:5d:8b", "network": {"id": "d7eabd44-9ba0-4fe3-8119-c3ff517cd131", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1444740660-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0ea10f2d436a4d758f514b2457ca195f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5deae7a2-44", "ovs_interfaceid": "5deae7a2-4461-4670-a9f6-fda626bae672", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 677.454639] 
env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 677.456008] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 677.499541] env[69328]: DEBUG oslo_vmware.api [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3272914, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.619349] env[69328]: DEBUG oslo_concurrency.lockutils [req-b1fa59be-0b98-4b71-b3e9-32974920e776 req-47781baa-be26-4bef-aad2-779ad11ff622 service nova] Releasing lock "refresh_cache-bbbfb48d-b474-4a6e-9078-336f23d2c343" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 677.619790] env[69328]: DEBUG nova.compute.manager [req-b1fa59be-0b98-4b71-b3e9-32974920e776 req-47781baa-be26-4bef-aad2-779ad11ff622 service nova] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Received event network-vif-plugged-5deae7a2-4461-4670-a9f6-fda626bae672 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 677.620086] env[69328]: DEBUG oslo_concurrency.lockutils [req-b1fa59be-0b98-4b71-b3e9-32974920e776 req-47781baa-be26-4bef-aad2-779ad11ff622 service nova] Acquiring lock "c3673531-9167-4d33-b8ce-d6afa5e589bc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 677.620317] env[69328]: DEBUG oslo_concurrency.lockutils [req-b1fa59be-0b98-4b71-b3e9-32974920e776 req-47781baa-be26-4bef-aad2-779ad11ff622 service nova] Lock "c3673531-9167-4d33-b8ce-d6afa5e589bc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 677.620522] env[69328]: DEBUG oslo_concurrency.lockutils [req-b1fa59be-0b98-4b71-b3e9-32974920e776 req-47781baa-be26-4bef-aad2-779ad11ff622 service nova] Lock "c3673531-9167-4d33-b8ce-d6afa5e589bc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 677.620733] env[69328]: DEBUG nova.compute.manager [req-b1fa59be-0b98-4b71-b3e9-32974920e776 req-47781baa-be26-4bef-aad2-779ad11ff622 service nova] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] No waiting events found dispatching network-vif-plugged-5deae7a2-4461-4670-a9f6-fda626bae672 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 677.620911] env[69328]: WARNING nova.compute.manager [req-b1fa59be-0b98-4b71-b3e9-32974920e776 req-47781baa-be26-4bef-aad2-779ad11ff622 service nova] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Received unexpected event network-vif-plugged-5deae7a2-4461-4670-a9f6-fda626bae672 for instance with vm_state building and 
task_state spawning. [ 677.624143] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a0fce03d-57b3-46d0-a4ce-36b7b57d33c8 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Lock "84baf472-6eb5-4c92-98eb-e35c14bca4e2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.966s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 677.795649] env[69328]: DEBUG oslo_vmware.api [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272915, 'name': ReconfigVM_Task, 'duration_secs': 0.31929} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.796063] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Reconfigured VM instance instance-00000019 to attach disk [datastore2] bbbfb48d-b474-4a6e-9078-336f23d2c343/bbbfb48d-b474-4a6e-9078-336f23d2c343.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 677.796560] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-97a7e85a-fc59-42d5-9637-14224ac542f2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.803220] env[69328]: DEBUG oslo_vmware.api [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Waiting for the task: (returnval){ [ 677.803220] env[69328]: value = "task-3272916" [ 677.803220] env[69328]: _type = "Task" [ 677.803220] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.811603] env[69328]: DEBUG oslo_vmware.api [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272916, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.923331] env[69328]: ERROR nova.scheduler.client.report [None req-da766223-5256-485d-87d7-8ec2ca958509 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] [req-3f3c6890-ee23-48c4-8c76-faf4bd4e1635] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-3f3c6890-ee23-48c4-8c76-faf4bd4e1635"}]} [ 677.933997] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Releasing lock "refresh_cache-c3673531-9167-4d33-b8ce-d6afa5e589bc" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 677.933997] env[69328]: DEBUG nova.compute.manager [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Instance network_info: |[{"id": "5deae7a2-4461-4670-a9f6-fda626bae672", "address": "fa:16:3e:a8:5d:8b", "network": {"id": "d7eabd44-9ba0-4fe3-8119-c3ff517cd131", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1444740660-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0ea10f2d436a4d758f514b2457ca195f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5deae7a2-44", "ovs_interfaceid": "5deae7a2-4461-4670-a9f6-fda626bae672", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 677.934447] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a8:5d:8b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7043ca7a-807c-4c7b-b646-23ffece188b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5deae7a2-4461-4670-a9f6-fda626bae672', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 677.941909] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Creating folder: Project (0ea10f2d436a4d758f514b2457ca195f). Parent ref: group-v653649. 
{{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 677.943096] env[69328]: DEBUG nova.scheduler.client.report [None req-da766223-5256-485d-87d7-8ec2ca958509 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Refreshing inventories for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 677.944998] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-06118947-c315-44ff-b5d1-e038c6debff1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.954412] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Created folder: Project (0ea10f2d436a4d758f514b2457ca195f) in parent group-v653649. [ 677.955030] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Creating folder: Instances. Parent ref: group-v653729. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 677.955030] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-45591734-b505-4058-bd3b-dbf6f56b5c05 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.969706] env[69328]: DEBUG nova.scheduler.client.report [None req-da766223-5256-485d-87d7-8ec2ca958509 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Updating ProviderTree inventory for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 677.969706] env[69328]: DEBUG nova.compute.provider_tree [None req-da766223-5256-485d-87d7-8ec2ca958509 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 677.972293] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Created folder: Instances in parent group-v653729. 
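Annotation (editorial, not part of the log): the ERROR followed by the "Refreshing inventories / Updating ProviderTree inventory" entries above is placement's optimistic concurrency in action — the inventory PUT carries the cached resource-provider generation, a concurrent update bumps that generation, the PUT returns 409 placement.concurrent_update, and the report client re-reads the provider before trying again. A minimal sketch of that guard-and-retry loop, assuming a hypothetical in-memory provider in place of the placement API.

```python
# Sketch of a generation-guarded inventory update; the "API" below is an
# in-memory dict, not the real placement service or Nova's report client.
class ConflictError(Exception):
    """Stands in for an HTTP 409 placement.concurrent_update response."""

PROVIDER = {"generation": 41,
            "inventory": {"VCPU": {"total": 48}, "MEMORY_MB": {"total": 196590}}}

def put_inventory(provider, generation, inventory):
    """Pretend PUT /resource_providers/{uuid}/inventories with a generation guard."""
    if generation != provider["generation"]:
        raise ConflictError("resource provider generation conflict")
    provider["inventory"] = inventory
    provider["generation"] += 1

def refresh(provider):
    """Pretend GET returning the provider's current generation and inventory."""
    return provider["generation"], dict(provider["inventory"])

def update_inventory(provider, new_inventory, cached_generation, retries=3):
    for _ in range(retries):
        try:
            put_inventory(provider, cached_generation, new_inventory)
            return True
        except ConflictError:
            # Someone else bumped the generation; refresh and retry, which is
            # what the "Refreshing inventories ..." entries above correspond to.
            cached_generation, _ = refresh(provider)
    return False

# Simulate a concurrent update landing before our PUT does.
PROVIDER["generation"] += 1
print(update_inventory(PROVIDER,
                       {"VCPU": {"total": 48},
                        "MEMORY_MB": {"total": 196590},
                        "DISK_GB": {"total": 400}},
                       cached_generation=41))
```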
[ 677.972293] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 677.972748] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 677.972857] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 677.973846] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d9af8669-75fa-4dcd-9294-f8b8b705c95c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.987169] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 677.988371] env[69328]: DEBUG nova.scheduler.client.report [None req-da766223-5256-485d-87d7-8ec2ca958509 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Refreshing aggregate associations for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e, aggregates: None {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 677.990716] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 677.994830] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 677.994830] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Acquiring lock "84baf472-6eb5-4c92-98eb-e35c14bca4e2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 677.995146] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Lock "84baf472-6eb5-4c92-98eb-e35c14bca4e2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 677.995236] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 
tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Acquiring lock "84baf472-6eb5-4c92-98eb-e35c14bca4e2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 677.996319] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Lock "84baf472-6eb5-4c92-98eb-e35c14bca4e2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 677.996319] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Lock "84baf472-6eb5-4c92-98eb-e35c14bca4e2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 677.997374] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 677.998949] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 677.999168] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69328) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 677.999373] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 677.999373] env[69328]: value = "task-3272920" [ 677.999373] env[69328]: _type = "Task" [ 677.999373] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.999838] env[69328]: INFO nova.compute.manager [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Terminating instance [ 678.001476] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager.update_available_resource {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 678.009016] env[69328]: DEBUG oslo_vmware.api [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3272914, 'name': PowerOnVM_Task, 'duration_secs': 0.554015} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.009673] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 678.009867] env[69328]: INFO nova.compute.manager [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Took 8.62 seconds to spawn the instance on the hypervisor. [ 678.010024] env[69328]: DEBUG nova.compute.manager [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 678.011155] env[69328]: DEBUG nova.scheduler.client.report [None req-da766223-5256-485d-87d7-8ec2ca958509 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Refreshing trait associations for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 678.014198] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a7ee685-06ee-42e3-8ec7-8ae38d3ea229 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.022172] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272920, 'name': CreateVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.126158] env[69328]: DEBUG nova.compute.manager [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 678.321558] env[69328]: DEBUG oslo_vmware.api [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272916, 'name': Rename_Task, 'duration_secs': 0.194921} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.321670] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 678.323200] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3d6d278e-ab89-47e4-bf2e-dc1c05a3e582 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.329583] env[69328]: DEBUG nova.compute.manager [req-ffd3730c-5715-41d3-9bd5-990699f733cf req-7e6bd75f-63e2-4153-9564-ef3b40447d45 service nova] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Received event network-changed-5deae7a2-4461-4670-a9f6-fda626bae672 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 678.329776] env[69328]: DEBUG nova.compute.manager [req-ffd3730c-5715-41d3-9bd5-990699f733cf req-7e6bd75f-63e2-4153-9564-ef3b40447d45 service nova] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Refreshing instance network info cache due to event network-changed-5deae7a2-4461-4670-a9f6-fda626bae672. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 678.330222] env[69328]: DEBUG oslo_concurrency.lockutils [req-ffd3730c-5715-41d3-9bd5-990699f733cf req-7e6bd75f-63e2-4153-9564-ef3b40447d45 service nova] Acquiring lock "refresh_cache-c3673531-9167-4d33-b8ce-d6afa5e589bc" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 678.330222] env[69328]: DEBUG oslo_concurrency.lockutils [req-ffd3730c-5715-41d3-9bd5-990699f733cf req-7e6bd75f-63e2-4153-9564-ef3b40447d45 service nova] Acquired lock "refresh_cache-c3673531-9167-4d33-b8ce-d6afa5e589bc" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 678.330332] env[69328]: DEBUG nova.network.neutron [req-ffd3730c-5715-41d3-9bd5-990699f733cf req-7e6bd75f-63e2-4153-9564-ef3b40447d45 service nova] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Refreshing network info cache for port 5deae7a2-4461-4670-a9f6-fda626bae672 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 678.333282] env[69328]: DEBUG oslo_vmware.api [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Waiting for the task: (returnval){ [ 678.333282] env[69328]: value = "task-3272921" [ 678.333282] env[69328]: _type = "Task" [ 678.333282] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.345864] env[69328]: DEBUG oslo_vmware.api [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272921, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.510612] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 678.511306] env[69328]: DEBUG nova.compute.manager [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 678.511498] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 678.512432] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16850439-3894-4934-8237-afc9cc7153ab {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.518832] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272920, 'name': CreateVM_Task, 'duration_secs': 0.464011} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.519364] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 678.522210] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 678.522210] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 678.522210] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 678.524412] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d894f413-a9cb-43b1-9626-64536ad86174 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.526163] env[69328]: DEBUG nova.virt.vmwareapi.vm_util 
[None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 678.528695] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f6413b03-5be6-4ed9-90dc-ce5be752ca0a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.537734] env[69328]: DEBUG oslo_vmware.api [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Waiting for the task: (returnval){ [ 678.537734] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52658454-4e6a-4c6f-6bc1-1a9df86a3951" [ 678.537734] env[69328]: _type = "Task" [ 678.537734] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.544455] env[69328]: DEBUG oslo_vmware.api [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Waiting for the task: (returnval){ [ 678.544455] env[69328]: value = "task-3272922" [ 678.544455] env[69328]: _type = "Task" [ 678.544455] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.545047] env[69328]: INFO nova.compute.manager [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Took 42.74 seconds to build instance. [ 678.556163] env[69328]: DEBUG oslo_vmware.api [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52658454-4e6a-4c6f-6bc1-1a9df86a3951, 'name': SearchDatastore_Task, 'duration_secs': 0.012961} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.556843] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 678.557157] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 678.557769] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 678.557769] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 678.557769] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 678.558108] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d69faaef-e3c4-4131-a4cd-44c14a4b4c1a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.565392] env[69328]: DEBUG oslo_vmware.api [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Task: {'id': task-3272922, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.577188] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 678.577404] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 678.578169] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bfb48212-63da-408d-afa6-a8de83e2a95f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.588052] env[69328]: DEBUG oslo_vmware.api [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Waiting for the task: (returnval){ [ 678.588052] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5243d53a-bfb6-b3be-0532-709f164b5116" [ 678.588052] env[69328]: _type = "Task" [ 678.588052] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.595827] env[69328]: DEBUG oslo_vmware.api [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5243d53a-bfb6-b3be-0532-709f164b5116, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.601019] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6605f4b-ace7-49b0-a0b5-690dba12a1c7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.605690] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aabfdf8f-cf7f-4cea-9ca3-d3645b3a6d39 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.640921] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8716a633-4cd8-4315-a0be-db4effb08d22 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.653631] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fba09058-69b8-4cb4-a086-d39d27f42b19 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.659243] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 678.669412] env[69328]: DEBUG nova.compute.provider_tree [None req-da766223-5256-485d-87d7-8ec2ca958509 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 678.849602] env[69328]: DEBUG oslo_vmware.api [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272921, 'name': PowerOnVM_Task} progress is 90%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.051082] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7f7e7cb3-a69c-46cd-904f-bcebbb477cf8 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "bc9c3a41-7264-4d69-bc15-397b5fa0a8ad" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 59.378s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 679.061617] env[69328]: DEBUG oslo_vmware.api [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Task: {'id': task-3272922, 'name': PowerOffVM_Task, 'duration_secs': 0.249342} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.063681] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 679.063978] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 679.066577] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-959ba300-b070-43c5-90ad-a306a638defa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.097936] env[69328]: DEBUG oslo_vmware.api [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5243d53a-bfb6-b3be-0532-709f164b5116, 'name': SearchDatastore_Task, 'duration_secs': 0.012059} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.099168] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b389273-1dc9-4284-a449-a95887bd5bfd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.105950] env[69328]: DEBUG oslo_vmware.api [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Waiting for the task: (returnval){ [ 679.105950] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]526ca4e0-b2ba-1327-3f8e-d701c4023d5a" [ 679.105950] env[69328]: _type = "Task" [ 679.105950] env[69328]: } to complete.
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.117217] env[69328]: DEBUG oslo_vmware.api [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]526ca4e0-b2ba-1327-3f8e-d701c4023d5a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.137437] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 679.138127] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 679.138330] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Deleting the datastore file [datastore1] 84baf472-6eb5-4c92-98eb-e35c14bca4e2 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 679.138610] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f3d47b1b-2f9d-42ed-b220-df429ab810a8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.145694] env[69328]: DEBUG oslo_vmware.api [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Waiting for the task: (returnval){ [ 679.145694] env[69328]: value = "task-3272924" [ 679.145694] env[69328]: _type = "Task" [ 679.145694] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.153941] env[69328]: DEBUG oslo_vmware.api [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Task: {'id': task-3272924, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.171377] env[69328]: DEBUG nova.scheduler.client.report [None req-da766223-5256-485d-87d7-8ec2ca958509 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 679.261936] env[69328]: DEBUG nova.network.neutron [req-ffd3730c-5715-41d3-9bd5-990699f733cf req-7e6bd75f-63e2-4153-9564-ef3b40447d45 service nova] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Updated VIF entry in instance network info cache for port 5deae7a2-4461-4670-a9f6-fda626bae672. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 679.262459] env[69328]: DEBUG nova.network.neutron [req-ffd3730c-5715-41d3-9bd5-990699f733cf req-7e6bd75f-63e2-4153-9564-ef3b40447d45 service nova] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Updating instance_info_cache with network_info: [{"id": "5deae7a2-4461-4670-a9f6-fda626bae672", "address": "fa:16:3e:a8:5d:8b", "network": {"id": "d7eabd44-9ba0-4fe3-8119-c3ff517cd131", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1444740660-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0ea10f2d436a4d758f514b2457ca195f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7043ca7a-807c-4c7b-b646-23ffece188b2", "external-id": "nsx-vlan-transportzone-619", "segmentation_id": 619, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5deae7a2-44", "ovs_interfaceid": "5deae7a2-4461-4670-a9f6-fda626bae672", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 679.345987] env[69328]: DEBUG oslo_vmware.api [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272921, 'name': PowerOnVM_Task, 'duration_secs': 0.580751} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.346286] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 679.346488] env[69328]: INFO nova.compute.manager [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Took 7.09 seconds to spawn the instance on the hypervisor. [ 679.346666] env[69328]: DEBUG nova.compute.manager [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 679.347484] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cee06d22-bb60-48a7-9ec6-469fad66ca49 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.553827] env[69328]: DEBUG nova.compute.manager [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 679.619408] env[69328]: DEBUG oslo_vmware.api [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]526ca4e0-b2ba-1327-3f8e-d701c4023d5a, 'name': SearchDatastore_Task, 'duration_secs': 0.01386} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.619730] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 679.620242] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] c3673531-9167-4d33-b8ce-d6afa5e589bc/c3673531-9167-4d33-b8ce-d6afa5e589bc.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 679.620884] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c1397f12-0f80-4026-b2ef-2d2fa301a1ef {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.630846] env[69328]: DEBUG oslo_vmware.api [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Waiting for the task: (returnval){ [ 679.630846] env[69328]: value = "task-3272925" [ 679.630846] env[69328]: _type = "Task" [ 679.630846] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.641546] env[69328]: DEBUG oslo_vmware.api [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Task: {'id': task-3272925, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.655598] env[69328]: DEBUG oslo_vmware.api [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Task: {'id': task-3272924, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.491054} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.656065] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 679.656284] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 679.656460] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 679.656633] env[69328]: INFO nova.compute.manager [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Took 1.15 seconds to destroy the instance on the hypervisor. [ 679.656872] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 679.657183] env[69328]: DEBUG nova.compute.manager [-] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 679.657183] env[69328]: DEBUG nova.network.neutron [-] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 679.678737] env[69328]: DEBUG oslo_concurrency.lockutils [None req-da766223-5256-485d-87d7-8ec2ca958509 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.495s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 679.681087] env[69328]: DEBUG oslo_concurrency.lockutils [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.504s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 679.682740] env[69328]: INFO nova.compute.claims [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 679.720230] env[69328]: INFO nova.scheduler.client.report [None req-da766223-5256-485d-87d7-8ec2ca958509 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Deleted allocations for instance 7b348a95-3ab2-4112-87e3-b17504c0a302 [ 679.766520] env[69328]: DEBUG oslo_concurrency.lockutils [req-ffd3730c-5715-41d3-9bd5-990699f733cf req-7e6bd75f-63e2-4153-9564-ef3b40447d45 service nova] Releasing lock "refresh_cache-c3673531-9167-4d33-b8ce-d6afa5e589bc" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 679.871129] env[69328]: INFO nova.compute.manager [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Took 42.93 seconds to build instance. [ 680.084191] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 680.144140] env[69328]: DEBUG oslo_vmware.api [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Task: {'id': task-3272925, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.231387] env[69328]: DEBUG oslo_concurrency.lockutils [None req-da766223-5256-485d-87d7-8ec2ca958509 tempest-TenantUsagesTestJSON-972720670 tempest-TenantUsagesTestJSON-972720670-project-member] Lock "7b348a95-3ab2-4112-87e3-b17504c0a302" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 38.904s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 680.373716] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8c0b15e-ee00-49f1-b90a-96d1768ce898 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Lock "bbbfb48d-b474-4a6e-9078-336f23d2c343" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 60.239s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 680.395854] env[69328]: DEBUG nova.compute.manager [req-7434c96d-f5a0-42d7-89b3-3e3ae953a0d8 req-f3887f44-2574-4158-95c5-ef53730bcefc service nova] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Received event network-changed-e8f19fa7-2ac8-47ea-bd97-b8bcdc8f3ed6 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 680.396681] env[69328]: DEBUG nova.compute.manager [req-7434c96d-f5a0-42d7-89b3-3e3ae953a0d8 req-f3887f44-2574-4158-95c5-ef53730bcefc service nova] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Refreshing instance network info cache due to event network-changed-e8f19fa7-2ac8-47ea-bd97-b8bcdc8f3ed6. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 680.400019] env[69328]: DEBUG oslo_concurrency.lockutils [req-7434c96d-f5a0-42d7-89b3-3e3ae953a0d8 req-f3887f44-2574-4158-95c5-ef53730bcefc service nova] Acquiring lock "refresh_cache-bc9c3a41-7264-4d69-bc15-397b5fa0a8ad" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.400019] env[69328]: DEBUG oslo_concurrency.lockutils [req-7434c96d-f5a0-42d7-89b3-3e3ae953a0d8 req-f3887f44-2574-4158-95c5-ef53730bcefc service nova] Acquired lock "refresh_cache-bc9c3a41-7264-4d69-bc15-397b5fa0a8ad" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 680.400019] env[69328]: DEBUG nova.network.neutron [req-7434c96d-f5a0-42d7-89b3-3e3ae953a0d8 req-f3887f44-2574-4158-95c5-ef53730bcefc service nova] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Refreshing network info cache for port e8f19fa7-2ac8-47ea-bd97-b8bcdc8f3ed6 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 680.471545] env[69328]: DEBUG nova.compute.manager [req-bea3cea7-7faa-4f77-94b7-242783170489 req-d48b2ce7-ca70-497e-b46c-7934b546db02 service nova] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Received event network-vif-deleted-eb83e86c-619b-4c67-a535-7ecc49d15ff2 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 680.472479] env[69328]: INFO nova.compute.manager [req-bea3cea7-7faa-4f77-94b7-242783170489 req-d48b2ce7-ca70-497e-b46c-7934b546db02 service nova] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Neutron deleted interface eb83e86c-619b-4c67-a535-7ecc49d15ff2; detaching it from the instance and deleting it from the info cache [ 680.472479] env[69328]: DEBUG nova.network.neutron
[req-bea3cea7-7faa-4f77-94b7-242783170489 req-d48b2ce7-ca70-497e-b46c-7934b546db02 service nova] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.644725] env[69328]: DEBUG oslo_vmware.api [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Task: {'id': task-3272925, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.726472} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.645731] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] c3673531-9167-4d33-b8ce-d6afa5e589bc/c3673531-9167-4d33-b8ce-d6afa5e589bc.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 680.645731] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 680.645731] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7bffc529-f5cb-4909-a6db-558bf44efeec {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.655300] env[69328]: DEBUG oslo_vmware.api [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Waiting for the task: (returnval){ [ 680.655300] env[69328]: value = "task-3272927" [ 680.655300] env[69328]: _type = "Task" [ 680.655300] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.663601] env[69328]: DEBUG oslo_vmware.api [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Task: {'id': task-3272927, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.777959] env[69328]: DEBUG nova.network.neutron [-] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.879080] env[69328]: DEBUG nova.compute.manager [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Starting instance... 
{{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 680.980041] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-795f4997-f03c-4f59-8e89-6630eaebb011 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.001483] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d3cdb65-aa3a-4180-9e02-ca376c1a95d3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.060528] env[69328]: DEBUG nova.compute.manager [req-bea3cea7-7faa-4f77-94b7-242783170489 req-d48b2ce7-ca70-497e-b46c-7934b546db02 service nova] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Detach interface failed, port_id=eb83e86c-619b-4c67-a535-7ecc49d15ff2, reason: Instance 84baf472-6eb5-4c92-98eb-e35c14bca4e2 could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 681.180910] env[69328]: DEBUG oslo_vmware.api [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Task: {'id': task-3272927, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.149719} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.184617] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 681.185804] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f14768d-f563-4c4e-bd91-204cb408ff5c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.212704] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Reconfiguring VM instance instance-0000001a to attach disk [datastore1] c3673531-9167-4d33-b8ce-d6afa5e589bc/c3673531-9167-4d33-b8ce-d6afa5e589bc.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 681.216419] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-15e9fc0a-ea1b-4fc6-8415-b2b62caae8e1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.241872] env[69328]: DEBUG oslo_vmware.api [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Waiting for the task: (returnval){ [ 681.241872] env[69328]: value = "task-3272928" [ 681.241872] env[69328]: _type = "Task" [ 681.241872] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.261610] env[69328]: DEBUG oslo_vmware.api [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Task: {'id': task-3272928, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.282817] env[69328]: INFO nova.compute.manager [-] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Took 1.63 seconds to deallocate network for instance. [ 681.301178] env[69328]: DEBUG nova.network.neutron [req-7434c96d-f5a0-42d7-89b3-3e3ae953a0d8 req-f3887f44-2574-4158-95c5-ef53730bcefc service nova] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Updated VIF entry in instance network info cache for port e8f19fa7-2ac8-47ea-bd97-b8bcdc8f3ed6. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 681.301828] env[69328]: DEBUG nova.network.neutron [req-7434c96d-f5a0-42d7-89b3-3e3ae953a0d8 req-f3887f44-2574-4158-95c5-ef53730bcefc service nova] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Updating instance_info_cache with network_info: [{"id": "e8f19fa7-2ac8-47ea-bd97-b8bcdc8f3ed6", "address": "fa:16:3e:64:7f:a9", "network": {"id": "2e8bdd1b-0cca-4f7c-bba3-ff1750073020", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2058593014-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.186", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "088bc9e3aeb449baa0a522342d57d183", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8f19fa7-2a", "ovs_interfaceid": "e8f19fa7-2ac8-47ea-bd97-b8bcdc8f3ed6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 681.404143] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 681.442516] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-173b4a6f-2fd8-4d7f-9d88-744bc9c8a96e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.453013] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db9332f7-16fd-4864-a130-8d983dee9c57 {{(pid=69328) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.485870] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14865c99-4ced-490e-9117-40bf3d26934d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.495164] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c495c41e-e522-420c-b24e-86f732bbd100 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.511247] env[69328]: DEBUG nova.compute.provider_tree [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 681.753824] env[69328]: DEBUG oslo_vmware.api [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Task: {'id': task-3272928, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.793233] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 681.804203] env[69328]: DEBUG oslo_concurrency.lockutils [req-7434c96d-f5a0-42d7-89b3-3e3ae953a0d8 req-f3887f44-2574-4158-95c5-ef53730bcefc service nova] Releasing lock "refresh_cache-bc9c3a41-7264-4d69-bc15-397b5fa0a8ad" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 682.035024] env[69328]: ERROR nova.scheduler.client.report [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [req-8f827b07-77a3-4503-97da-d28705f5d298] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-8f827b07-77a3-4503-97da-d28705f5d298"}]} [ 682.064709] env[69328]: DEBUG nova.scheduler.client.report [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Refreshing inventories for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 682.079472] env[69328]: DEBUG nova.scheduler.client.report [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Updating ProviderTree inventory for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 682.079719] env[69328]: DEBUG nova.compute.provider_tree [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 682.097473] env[69328]: DEBUG nova.scheduler.client.report [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Refreshing aggregate associations for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e, aggregates: None {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 682.118449] env[69328]: DEBUG nova.scheduler.client.report [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Refreshing trait associations for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 682.162948] env[69328]: DEBUG nova.compute.manager [None req-de24d6d0-9871-4285-8906-8b9e68839917 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 682.163862] env[69328]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a80c1fe6-708f-485e-90bb-05270782c483 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.256206] env[69328]: DEBUG oslo_vmware.api [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Task: {'id': task-3272928, 'name': ReconfigVM_Task, 'duration_secs': 0.615973} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.259623] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Reconfigured VM instance instance-0000001a to attach disk [datastore1] c3673531-9167-4d33-b8ce-d6afa5e589bc/c3673531-9167-4d33-b8ce-d6afa5e589bc.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 682.260912] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-17e86c11-eb1b-4501-950c-5962746c96e0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.268722] env[69328]: DEBUG oslo_vmware.api [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Waiting for the task: (returnval){ [ 682.268722] env[69328]: value = "task-3272929" [ 682.268722] env[69328]: _type = "Task" [ 682.268722] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.278058] env[69328]: DEBUG oslo_vmware.api [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Task: {'id': task-3272929, 'name': Rename_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.512333] env[69328]: DEBUG oslo_concurrency.lockutils [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Acquiring lock "d724a141-35e7-4483-99aa-8a17066fb63b" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 682.512333] env[69328]: DEBUG oslo_concurrency.lockutils [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Lock "d724a141-35e7-4483-99aa-8a17066fb63b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 682.670638] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af3de3bf-f994-43fe-a97d-ed2e9ff43b23 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.680948] env[69328]: INFO nova.compute.manager [None req-de24d6d0-9871-4285-8906-8b9e68839917 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] instance snapshotting [ 682.682929] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e9e1e4f-4412-43a8-a2e4-f19f997bce0f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.687610] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fc81f88-792e-4b8e-9fa5-550ad197961b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.733958] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c90eba99-bd0c-447d-87e9-0f5a981453bd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.737316] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a78999f-37da-4a6f-adbe-50bea02f6fa1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.750290] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d3af60a-3630-42a3-ae05-1d3234f7c5a1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.766576] env[69328]: DEBUG nova.compute.provider_tree [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1,
'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 682.778131] env[69328]: DEBUG oslo_vmware.api [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Task: {'id': task-3272929, 'name': Rename_Task, 'duration_secs': 0.404376} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.779345] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 682.779507] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c83830eb-24e1-4a1a-ab5d-9c496820960e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.786784] env[69328]: DEBUG oslo_vmware.api [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Waiting for the task: (returnval){ [ 682.786784] env[69328]: value = "task-3272930" [ 682.786784] env[69328]: _type = "Task" [ 682.786784] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.795428] env[69328]: DEBUG oslo_vmware.api [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Task: {'id': task-3272930, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.881082] env[69328]: DEBUG oslo_vmware.rw_handles [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528cbe37-e35e-7142-4b87-7855ec059e00/disk-0.vmdk. {{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 682.882082] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f46a3c95-d6ab-4ce0-bd37-5f9467fbf817 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.891811] env[69328]: DEBUG oslo_vmware.rw_handles [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528cbe37-e35e-7142-4b87-7855ec059e00/disk-0.vmdk is in state: ready. 
{{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 682.892162] env[69328]: ERROR oslo_vmware.rw_handles [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528cbe37-e35e-7142-4b87-7855ec059e00/disk-0.vmdk due to incomplete transfer. [ 682.892343] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-be44cfed-baca-423c-b09d-1fbb67351bb7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.899406] env[69328]: DEBUG oslo_vmware.rw_handles [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528cbe37-e35e-7142-4b87-7855ec059e00/disk-0.vmdk. {{(pid=69328) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 682.899557] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Uploaded image e30468af-18ba-405b-9493-86d69d242d95 to the Glance image server {{(pid=69328) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 682.901711] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Destroying the VM {{(pid=69328) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 682.901977] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-0386dce6-68ec-4959-8cbf-4cdebeb7da08 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.908431] env[69328]: DEBUG oslo_vmware.api [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Waiting for the task: (returnval){ [ 682.908431] env[69328]: value = "task-3272931" [ 682.908431] env[69328]: _type = "Task" [ 682.908431] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.917358] env[69328]: DEBUG oslo_vmware.api [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272931, 'name': Destroy_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.251450] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-de24d6d0-9871-4285-8906-8b9e68839917 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Creating Snapshot of the VM instance {{(pid=69328) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 683.251450] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-a469d3ec-d2cf-4327-b014-d5827d05e241 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.258938] env[69328]: DEBUG oslo_vmware.api [None req-de24d6d0-9871-4285-8906-8b9e68839917 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Waiting for the task: (returnval){ [ 683.258938] env[69328]: value = "task-3272932" [ 683.258938] env[69328]: _type = "Task" [ 683.258938] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.267351] env[69328]: DEBUG oslo_vmware.api [None req-de24d6d0-9871-4285-8906-8b9e68839917 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272932, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.297795] env[69328]: DEBUG oslo_vmware.api [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Task: {'id': task-3272930, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.315391] env[69328]: DEBUG nova.scheduler.client.report [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Updated inventory for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with generation 51 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 683.315718] env[69328]: DEBUG nova.compute.provider_tree [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Updating resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e generation from 51 to 52 during operation: update_inventory {{(pid=69328) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 683.316215] env[69328]: DEBUG nova.compute.provider_tree [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 683.427946] env[69328]: DEBUG oslo_vmware.api [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272931, 'name': Destroy_Task} progress is 33%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.768488] env[69328]: DEBUG oslo_vmware.api [None req-de24d6d0-9871-4285-8906-8b9e68839917 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272932, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.795863] env[69328]: DEBUG oslo_vmware.api [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Task: {'id': task-3272930, 'name': PowerOnVM_Task, 'duration_secs': 0.761732} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.796147] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 683.796362] env[69328]: INFO nova.compute.manager [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Took 8.85 seconds to spawn the instance on the hypervisor. [ 683.796542] env[69328]: DEBUG nova.compute.manager [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 683.797324] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5ae8e6a-3c6a-4464-9fea-9dfe0d49efd6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.827247] env[69328]: DEBUG oslo_concurrency.lockutils [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.146s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 683.827686] env[69328]: DEBUG nova.compute.manager [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 683.830485] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.048s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 683.831950] env[69328]: INFO nova.compute.claims [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 683.919729] env[69328]: DEBUG oslo_vmware.api [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272931, 'name': Destroy_Task, 'duration_secs': 0.661694} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.919729] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Destroyed the VM [ 683.919729] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Deleting Snapshot of the VM instance {{(pid=69328) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 683.920276] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-02e0d0dd-ce0a-4657-ab58-5499b4d50b2c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.925533] env[69328]: DEBUG oslo_vmware.api [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Waiting for the task: (returnval){ [ 683.925533] env[69328]: value = "task-3272933" [ 683.925533] env[69328]: _type = "Task" [ 683.925533] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.934625] env[69328]: DEBUG oslo_vmware.api [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272933, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.241868] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Acquiring lock "18022645-9a2a-489e-b0b1-486165f46f14" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 684.241868] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Lock "18022645-9a2a-489e-b0b1-486165f46f14" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 684.270426] env[69328]: DEBUG oslo_vmware.api [None req-de24d6d0-9871-4285-8906-8b9e68839917 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272932, 'name': CreateSnapshot_Task, 'duration_secs': 0.529083} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.270807] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-de24d6d0-9871-4285-8906-8b9e68839917 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Created Snapshot of the VM instance {{(pid=69328) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 684.271659] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da2bf28e-060f-47f3-b691-b7cf2008daae {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.313669] env[69328]: INFO nova.compute.manager [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Took 43.71 seconds to build instance. [ 684.338224] env[69328]: DEBUG nova.compute.utils [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 684.340263] env[69328]: DEBUG nova.compute.manager [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 684.340524] env[69328]: DEBUG nova.network.neutron [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 684.390408] env[69328]: DEBUG nova.policy [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '435c64c503c043a29f90396ad3b070d9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '87581f423dc64e4fb9fe1d51ebc68597', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 684.435485] env[69328]: DEBUG oslo_vmware.api [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272933, 'name': RemoveSnapshot_Task} progress is 26%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.708726] env[69328]: DEBUG nova.network.neutron [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Successfully created port: 9e189e9a-ecbf-475e-82a4-508c1a0aec74 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 684.789763] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-de24d6d0-9871-4285-8906-8b9e68839917 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Creating linked-clone VM from snapshot {{(pid=69328) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 684.793927] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-584cd44d-9fd6-4df4-a1dd-d6f94aba0ea9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.804258] env[69328]: DEBUG oslo_vmware.api [None req-de24d6d0-9871-4285-8906-8b9e68839917 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Waiting for the task: (returnval){ [ 684.804258] env[69328]: value = "task-3272934" [ 684.804258] env[69328]: _type = "Task" [ 684.804258] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.817139] env[69328]: DEBUG oslo_vmware.api [None req-de24d6d0-9871-4285-8906-8b9e68839917 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272934, 'name': CloneVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.817139] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1533706e-89a6-473a-afef-3297244c341c tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Lock "c3673531-9167-4d33-b8ce-d6afa5e589bc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.677s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 684.843685] env[69328]: DEBUG nova.compute.manager [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 684.940424] env[69328]: DEBUG oslo_vmware.api [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272933, 'name': RemoveSnapshot_Task} progress is 26%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.316596] env[69328]: DEBUG oslo_vmware.api [None req-de24d6d0-9871-4285-8906-8b9e68839917 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272934, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.322743] env[69328]: DEBUG nova.compute.manager [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 685.380550] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-808365bf-2927-479b-909d-ad8b89a46add {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.389201] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32efb666-ccd8-4fc2-83cd-bd3093fe9ce5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.425515] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de976fdd-213b-4d0d-b959-2c3800a58097 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.436749] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53d2e2bd-0232-44cd-9013-47237d4d11d1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.443853] env[69328]: DEBUG oslo_vmware.api [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272933, 'name': RemoveSnapshot_Task, 'duration_secs': 1.209206} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.444517] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Deleted Snapshot of the VM instance {{(pid=69328) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 685.444771] env[69328]: INFO nova.compute.manager [None req-ddde05fb-1d3a-49dd-b167-da0107f56fed tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Took 19.30 seconds to snapshot the instance on the hypervisor. 
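The recurring "Task: {...} progress is N%." and "completed successfully" entries in this log come from oslo.vmware's task poller (the wait_for_task/_poll_task calls logged from oslo_vmware/api.py:397, :434 and :444). As a rough, self-contained sketch of that polling pattern only — get_task_info() and its return values here are invented stand-ins for illustration, not the library's real API — the loop below polls a task reference until it reports success or error:

# Hypothetical sketch of the polling pattern behind the _poll_task log lines above.
# get_task_info() is an invented stub, not oslo.vmware's actual interface.
import time
from itertools import count

_calls = count(1)

def get_task_info(task_ref):
    """Stub standing in for a server-side task query (invented for this sketch)."""
    progress = min(50 * next(_calls), 100)
    return {"id": task_ref,
            "state": "success" if progress >= 100 else "running",
            "progress": progress}

def wait_for_task(task_ref, interval=0.5):
    """Poll until the task succeeds; raise if it errors (cf. the api.py:434/444 lines)."""
    while True:
        info = get_task_info(task_ref)
        if info["state"] == "error":
            raise RuntimeError("Task %s failed" % info["id"])
        if info["state"] == "success":
            print("Task %s completed successfully." % info["id"])
            return info
        print("Task %s progress is %d%%." % (info["id"], info["progress"]))
        time.sleep(interval)

if __name__ == "__main__":
    wait_for_task("task-3272933")

In the log itself, this is the loop that produces, for example, task-3272933 (RemoveSnapshot_Task) moving from 0% to 26% and finally to "completed successfully" with duration_secs 1.209206 in the entries above.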
[ 685.456211] env[69328]: DEBUG nova.compute.provider_tree [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 685.815148] env[69328]: DEBUG oslo_vmware.api [None req-de24d6d0-9871-4285-8906-8b9e68839917 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272934, 'name': CloneVM_Task} progress is 94%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.840826] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 685.855218] env[69328]: DEBUG nova.compute.manager [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 685.877266] env[69328]: DEBUG nova.virt.hardware [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 685.877534] env[69328]: DEBUG nova.virt.hardware [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 685.877707] env[69328]: DEBUG nova.virt.hardware [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 685.877903] env[69328]: DEBUG nova.virt.hardware [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 685.878071] env[69328]: DEBUG nova.virt.hardware [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 685.878228] env[69328]: DEBUG nova.virt.hardware [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 685.878492] env[69328]: DEBUG nova.virt.hardware [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 685.878678] env[69328]: DEBUG nova.virt.hardware [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 685.878862] env[69328]: DEBUG nova.virt.hardware [None 
req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 685.879133] env[69328]: DEBUG nova.virt.hardware [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 685.879337] env[69328]: DEBUG nova.virt.hardware [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 685.880229] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b57fe8c-0a46-4862-8b0f-6bd27969cfeb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.888747] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3a0b39c-2911-4503-96da-29d56387848f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.982667] env[69328]: ERROR nova.scheduler.client.report [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] [req-ed4c8cde-adf0-4c48-b9f5-c658a0f3a0d5] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ed4c8cde-adf0-4c48-b9f5-c658a0f3a0d5"}]} [ 685.998631] env[69328]: DEBUG nova.scheduler.client.report [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Refreshing inventories for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 686.012729] env[69328]: DEBUG nova.scheduler.client.report [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Updating ProviderTree inventory for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 686.014143] env[69328]: DEBUG nova.compute.provider_tree [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 686.026753] env[69328]: DEBUG nova.scheduler.client.report [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Refreshing aggregate associations for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e, aggregates: None {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 686.045512] env[69328]: DEBUG nova.scheduler.client.report [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Refreshing trait associations for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 686.083824] env[69328]: DEBUG oslo_concurrency.lockutils [None req-53e9d6fb-ac70-448d-a705-85e6c8f30036 tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Acquiring lock "c3673531-9167-4d33-b8ce-d6afa5e589bc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 686.084130] env[69328]: DEBUG oslo_concurrency.lockutils [None req-53e9d6fb-ac70-448d-a705-85e6c8f30036 tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Lock "c3673531-9167-4d33-b8ce-d6afa5e589bc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 686.084345] env[69328]: DEBUG oslo_concurrency.lockutils [None req-53e9d6fb-ac70-448d-a705-85e6c8f30036 tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Acquiring lock "c3673531-9167-4d33-b8ce-d6afa5e589bc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 686.084532] env[69328]: DEBUG oslo_concurrency.lockutils [None req-53e9d6fb-ac70-448d-a705-85e6c8f30036 tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Lock "c3673531-9167-4d33-b8ce-d6afa5e589bc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 686.084697] env[69328]: DEBUG oslo_concurrency.lockutils [None req-53e9d6fb-ac70-448d-a705-85e6c8f30036 tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Lock "c3673531-9167-4d33-b8ce-d6afa5e589bc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 686.089150] env[69328]: INFO nova.compute.manager [None req-53e9d6fb-ac70-448d-a705-85e6c8f30036 tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Terminating instance [ 686.206689] env[69328]: DEBUG nova.compute.manager [req-45b120d0-5492-45d7-bc09-3fa82edd9ebf req-eeb712e4-d8c1-4228-8091-8a35860bf66e service nova] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Received event network-vif-plugged-9e189e9a-ecbf-475e-82a4-508c1a0aec74 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 686.207012] env[69328]: DEBUG oslo_concurrency.lockutils [req-45b120d0-5492-45d7-bc09-3fa82edd9ebf req-eeb712e4-d8c1-4228-8091-8a35860bf66e service nova] Acquiring lock "b0a1441c-81e2-4131-a2ff-f5042d559d9f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 686.207197] env[69328]: DEBUG oslo_concurrency.lockutils [req-45b120d0-5492-45d7-bc09-3fa82edd9ebf req-eeb712e4-d8c1-4228-8091-8a35860bf66e service nova] Lock "b0a1441c-81e2-4131-a2ff-f5042d559d9f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 686.207314] env[69328]: DEBUG oslo_concurrency.lockutils 
[req-45b120d0-5492-45d7-bc09-3fa82edd9ebf req-eeb712e4-d8c1-4228-8091-8a35860bf66e service nova] Lock "b0a1441c-81e2-4131-a2ff-f5042d559d9f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 686.207518] env[69328]: DEBUG nova.compute.manager [req-45b120d0-5492-45d7-bc09-3fa82edd9ebf req-eeb712e4-d8c1-4228-8091-8a35860bf66e service nova] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] No waiting events found dispatching network-vif-plugged-9e189e9a-ecbf-475e-82a4-508c1a0aec74 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 686.207651] env[69328]: WARNING nova.compute.manager [req-45b120d0-5492-45d7-bc09-3fa82edd9ebf req-eeb712e4-d8c1-4228-8091-8a35860bf66e service nova] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Received unexpected event network-vif-plugged-9e189e9a-ecbf-475e-82a4-508c1a0aec74 for instance with vm_state building and task_state spawning. [ 686.319374] env[69328]: DEBUG nova.network.neutron [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Successfully updated port: 9e189e9a-ecbf-475e-82a4-508c1a0aec74 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 686.326310] env[69328]: DEBUG oslo_vmware.api [None req-de24d6d0-9871-4285-8906-8b9e68839917 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272934, 'name': CloneVM_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.541625] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f5f365e-5d94-49a1-a2b4-0b0477f5b7c9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.552611] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a1bbd63-41d6-4afe-a799-a1bac75dfecf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.583890] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c47190b-9e8a-4be9-879e-fc437c7b4312 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.593531] env[69328]: DEBUG nova.compute.manager [None req-53e9d6fb-ac70-448d-a705-85e6c8f30036 tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 686.593796] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-53e9d6fb-ac70-448d-a705-85e6c8f30036 tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 686.594742] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-102a3ec8-24a7-4f5b-a63d-b4497fdd1488 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.598182] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27378d26-9720-48c1-82d5-b762d227d4c9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.606601] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-53e9d6fb-ac70-448d-a705-85e6c8f30036 tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 686.614585] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bb2bce42-dfe6-4e1c-a062-f9a923bfebc2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.616790] env[69328]: DEBUG nova.compute.provider_tree [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 686.624226] env[69328]: DEBUG oslo_vmware.api [None req-53e9d6fb-ac70-448d-a705-85e6c8f30036 tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Waiting for the task: (returnval){ [ 686.624226] env[69328]: value = "task-3272935" [ 686.624226] env[69328]: _type = "Task" [ 686.624226] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.633726] env[69328]: DEBUG oslo_vmware.api [None req-53e9d6fb-ac70-448d-a705-85e6c8f30036 tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Task: {'id': task-3272935, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.817091] env[69328]: DEBUG oslo_vmware.api [None req-de24d6d0-9871-4285-8906-8b9e68839917 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272934, 'name': CloneVM_Task, 'duration_secs': 1.607226} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.817960] env[69328]: INFO nova.virt.vmwareapi.vmops [None req-de24d6d0-9871-4285-8906-8b9e68839917 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Created linked-clone VM from snapshot [ 686.818262] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47fe7da4-c299-49b8-967a-e95dc105945e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.821906] env[69328]: DEBUG oslo_concurrency.lockutils [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "refresh_cache-b0a1441c-81e2-4131-a2ff-f5042d559d9f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.822090] env[69328]: DEBUG oslo_concurrency.lockutils [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquired lock "refresh_cache-b0a1441c-81e2-4131-a2ff-f5042d559d9f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 686.822262] env[69328]: DEBUG nova.network.neutron [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 686.827088] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-de24d6d0-9871-4285-8906-8b9e68839917 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Uploading image d19c5a34-425c-40c8-a06e-435c94382c66 {{(pid=69328) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 686.858207] env[69328]: DEBUG oslo_vmware.rw_handles [None req-de24d6d0-9871-4285-8906-8b9e68839917 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 686.858207] env[69328]: value = "vm-653733" [ 686.858207] env[69328]: _type = "VirtualMachine" [ 686.858207] env[69328]: }. {{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 686.858476] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-055fbeb3-b647-4633-8a91-06798653818b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.868624] env[69328]: DEBUG oslo_vmware.rw_handles [None req-de24d6d0-9871-4285-8906-8b9e68839917 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Lease: (returnval){ [ 686.868624] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]528b9f91-533d-7c22-127f-4c3a29244349" [ 686.868624] env[69328]: _type = "HttpNfcLease" [ 686.868624] env[69328]: } obtained for exporting VM: (result){ [ 686.868624] env[69328]: value = "vm-653733" [ 686.868624] env[69328]: _type = "VirtualMachine" [ 686.868624] env[69328]: }. 
{{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 686.869094] env[69328]: DEBUG oslo_vmware.api [None req-de24d6d0-9871-4285-8906-8b9e68839917 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Waiting for the lease: (returnval){ [ 686.869094] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]528b9f91-533d-7c22-127f-4c3a29244349" [ 686.869094] env[69328]: _type = "HttpNfcLease" [ 686.869094] env[69328]: } to be ready. {{(pid=69328) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 686.881454] env[69328]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 686.881454] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]528b9f91-533d-7c22-127f-4c3a29244349" [ 686.881454] env[69328]: _type = "HttpNfcLease" [ 686.881454] env[69328]: } is initializing. {{(pid=69328) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 687.052657] env[69328]: DEBUG oslo_concurrency.lockutils [None req-35d1b3a3-3800-4ca4-b360-cce541dbcceb tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Acquiring lock "6102f8e6-f815-4f5f-921f-990be81fca0d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 687.052957] env[69328]: DEBUG oslo_concurrency.lockutils [None req-35d1b3a3-3800-4ca4-b360-cce541dbcceb tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Lock "6102f8e6-f815-4f5f-921f-990be81fca0d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 687.053196] env[69328]: DEBUG oslo_concurrency.lockutils [None req-35d1b3a3-3800-4ca4-b360-cce541dbcceb tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Acquiring lock "6102f8e6-f815-4f5f-921f-990be81fca0d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 687.053388] env[69328]: DEBUG oslo_concurrency.lockutils [None req-35d1b3a3-3800-4ca4-b360-cce541dbcceb tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Lock "6102f8e6-f815-4f5f-921f-990be81fca0d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 687.053561] env[69328]: DEBUG oslo_concurrency.lockutils [None req-35d1b3a3-3800-4ca4-b360-cce541dbcceb tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Lock "6102f8e6-f815-4f5f-921f-990be81fca0d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 687.056957] env[69328]: INFO nova.compute.manager [None req-35d1b3a3-3800-4ca4-b360-cce541dbcceb tempest-ImagesOneServerTestJSON-560227128 
tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Terminating instance [ 687.137084] env[69328]: DEBUG oslo_vmware.api [None req-53e9d6fb-ac70-448d-a705-85e6c8f30036 tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Task: {'id': task-3272935, 'name': PowerOffVM_Task, 'duration_secs': 0.27234} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.137084] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-53e9d6fb-ac70-448d-a705-85e6c8f30036 tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 687.137278] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-53e9d6fb-ac70-448d-a705-85e6c8f30036 tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 687.137461] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e9caf80b-3588-4c4e-8626-8b307529612b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.154103] env[69328]: DEBUG nova.scheduler.client.report [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Updated inventory for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with generation 53 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 687.154381] env[69328]: DEBUG nova.compute.provider_tree [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Updating resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e generation from 53 to 54 during operation: update_inventory {{(pid=69328) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 687.154566] env[69328]: DEBUG nova.compute.provider_tree [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 687.209305] env[69328]: 
DEBUG nova.virt.vmwareapi.vmops [None req-53e9d6fb-ac70-448d-a705-85e6c8f30036 tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 687.209552] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-53e9d6fb-ac70-448d-a705-85e6c8f30036 tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 687.209676] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-53e9d6fb-ac70-448d-a705-85e6c8f30036 tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Deleting the datastore file [datastore1] c3673531-9167-4d33-b8ce-d6afa5e589bc {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 687.209934] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9a999cb7-1bca-4044-b573-9881ac289346 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.220361] env[69328]: DEBUG oslo_vmware.api [None req-53e9d6fb-ac70-448d-a705-85e6c8f30036 tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Waiting for the task: (returnval){ [ 687.220361] env[69328]: value = "task-3272938" [ 687.220361] env[69328]: _type = "Task" [ 687.220361] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.229359] env[69328]: DEBUG oslo_vmware.api [None req-53e9d6fb-ac70-448d-a705-85e6c8f30036 tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Task: {'id': task-3272938, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.377550] env[69328]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 687.377550] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]528b9f91-533d-7c22-127f-4c3a29244349" [ 687.377550] env[69328]: _type = "HttpNfcLease" [ 687.377550] env[69328]: } is ready. {{(pid=69328) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 687.378102] env[69328]: DEBUG oslo_vmware.rw_handles [None req-de24d6d0-9871-4285-8906-8b9e68839917 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 687.378102] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]528b9f91-533d-7c22-127f-4c3a29244349" [ 687.378102] env[69328]: _type = "HttpNfcLease" [ 687.378102] env[69328]: }. 
{{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 687.378772] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19ba25a4-bf73-4357-beba-209c3b93edd1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.388031] env[69328]: DEBUG oslo_vmware.rw_handles [None req-de24d6d0-9871-4285-8906-8b9e68839917 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5252e18b-15dd-06f9-6fae-70e581532381/disk-0.vmdk from lease info. {{(pid=69328) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 687.388031] env[69328]: DEBUG oslo_vmware.rw_handles [None req-de24d6d0-9871-4285-8906-8b9e68839917 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5252e18b-15dd-06f9-6fae-70e581532381/disk-0.vmdk for reading. {{(pid=69328) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 687.446933] env[69328]: DEBUG nova.network.neutron [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 687.496692] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-7395308f-f81a-4218-86f0-7c717844a962 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.540850] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4811b5f6-3363-40c1-87f7-1c2bd797b109 tempest-VolumesAssistedSnapshotsTest-1226842710 tempest-VolumesAssistedSnapshotsTest-1226842710-project-admin] Acquiring lock "a798c3f2-ccde-488e-8a14-21f4a04f8e12" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 687.541120] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4811b5f6-3363-40c1-87f7-1c2bd797b109 tempest-VolumesAssistedSnapshotsTest-1226842710 tempest-VolumesAssistedSnapshotsTest-1226842710-project-admin] Lock "a798c3f2-ccde-488e-8a14-21f4a04f8e12" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 687.559205] env[69328]: DEBUG nova.compute.manager [None req-35d1b3a3-3800-4ca4-b360-cce541dbcceb tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 687.559436] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-35d1b3a3-3800-4ca4-b360-cce541dbcceb tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 687.560324] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36ac155c-106b-47d8-845f-a46b5f80b627 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.568929] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-35d1b3a3-3800-4ca4-b360-cce541dbcceb tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 687.569660] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-61d6a79b-d412-455f-8286-ec0426f7a574 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.577032] env[69328]: DEBUG oslo_vmware.api [None req-35d1b3a3-3800-4ca4-b360-cce541dbcceb tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Waiting for the task: (returnval){ [ 687.577032] env[69328]: value = "task-3272939" [ 687.577032] env[69328]: _type = "Task" [ 687.577032] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.588599] env[69328]: DEBUG oslo_vmware.api [None req-35d1b3a3-3800-4ca4-b360-cce541dbcceb tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272939, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.652201] env[69328]: DEBUG nova.network.neutron [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Updating instance_info_cache with network_info: [{"id": "9e189e9a-ecbf-475e-82a4-508c1a0aec74", "address": "fa:16:3e:40:7c:9a", "network": {"id": "ce3bec03-01f9-4ac9-80e4-bfb944c0bb66", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-545997027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87581f423dc64e4fb9fe1d51ebc68597", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e189e9a-ec", "ovs_interfaceid": "9e189e9a-ecbf-475e-82a4-508c1a0aec74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 687.660553] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.830s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 687.661619] env[69328]: DEBUG nova.compute.manager [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 687.665387] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2943fff6-bb40-405e-a549-f2cad6459000 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.332s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 687.665781] env[69328]: DEBUG nova.objects.instance [None req-2943fff6-bb40-405e-a549-f2cad6459000 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Lazy-loading 'resources' on Instance uuid 26feb2d1-ff64-4a13-af83-b6d5fe4348e1 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 687.730360] env[69328]: DEBUG oslo_vmware.api [None req-53e9d6fb-ac70-448d-a705-85e6c8f30036 tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Task: {'id': task-3272938, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.415586} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.730715] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-53e9d6fb-ac70-448d-a705-85e6c8f30036 tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 687.731010] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-53e9d6fb-ac70-448d-a705-85e6c8f30036 tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 687.731237] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-53e9d6fb-ac70-448d-a705-85e6c8f30036 tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 687.731503] env[69328]: INFO nova.compute.manager [None req-53e9d6fb-ac70-448d-a705-85e6c8f30036 tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Took 1.14 seconds to destroy the instance on the hypervisor. [ 687.731790] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-53e9d6fb-ac70-448d-a705-85e6c8f30036 tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 687.732133] env[69328]: DEBUG nova.compute.manager [-] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 687.732253] env[69328]: DEBUG nova.network.neutron [-] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 688.045959] env[69328]: DEBUG nova.compute.utils [None req-4811b5f6-3363-40c1-87f7-1c2bd797b109 tempest-VolumesAssistedSnapshotsTest-1226842710 tempest-VolumesAssistedSnapshotsTest-1226842710-project-admin] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 688.088234] env[69328]: DEBUG oslo_vmware.api [None req-35d1b3a3-3800-4ca4-b360-cce541dbcceb tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272939, 'name': PowerOffVM_Task, 'duration_secs': 0.275788} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.088642] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-35d1b3a3-3800-4ca4-b360-cce541dbcceb tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 688.089986] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-35d1b3a3-3800-4ca4-b360-cce541dbcceb tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 688.090148] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-78fb838a-5143-4237-813c-f9fc9b5f1135 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.155896] env[69328]: DEBUG oslo_concurrency.lockutils [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Releasing lock "refresh_cache-b0a1441c-81e2-4131-a2ff-f5042d559d9f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 688.156332] env[69328]: DEBUG nova.compute.manager [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Instance network_info: |[{"id": "9e189e9a-ecbf-475e-82a4-508c1a0aec74", "address": "fa:16:3e:40:7c:9a", "network": {"id": "ce3bec03-01f9-4ac9-80e4-bfb944c0bb66", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-545997027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87581f423dc64e4fb9fe1d51ebc68597", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e189e9a-ec", "ovs_interfaceid": "9e189e9a-ecbf-475e-82a4-508c1a0aec74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 688.156903] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:40:7c:9a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1768af3d-3317-4ef5-b484-0c2707d63de7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9e189e9a-ecbf-475e-82a4-508c1a0aec74', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 688.165683] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Creating folder: Project (87581f423dc64e4fb9fe1d51ebc68597). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 688.166532] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-66ef43ad-f4bc-4605-8b37-33038afe73cc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.171499] env[69328]: DEBUG nova.compute.utils [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 688.176340] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-35d1b3a3-3800-4ca4-b360-cce541dbcceb tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 688.176340] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-35d1b3a3-3800-4ca4-b360-cce541dbcceb tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 688.176826] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-35d1b3a3-3800-4ca4-b360-cce541dbcceb tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Deleting the datastore file [datastore1] 6102f8e6-f815-4f5f-921f-990be81fca0d {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 688.177720] env[69328]: DEBUG nova.compute.manager [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] [instance: 
b7409a67-c140-436f-9c4e-27dae259f648] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 688.177901] env[69328]: DEBUG nova.network.neutron [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] [instance: b7409a67-c140-436f-9c4e-27dae259f648] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 688.180461] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6aa8708e-1358-43b5-a61d-944ebd3c62d7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.188173] env[69328]: DEBUG oslo_vmware.api [None req-35d1b3a3-3800-4ca4-b360-cce541dbcceb tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Waiting for the task: (returnval){ [ 688.188173] env[69328]: value = "task-3272942" [ 688.188173] env[69328]: _type = "Task" [ 688.188173] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.190614] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Created folder: Project (87581f423dc64e4fb9fe1d51ebc68597) in parent group-v653649. [ 688.191347] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Creating folder: Instances. Parent ref: group-v653734. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 688.195372] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3b6d89d9-2b1a-4faa-a1f7-18f45efd82c7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.203384] env[69328]: DEBUG oslo_vmware.api [None req-35d1b3a3-3800-4ca4-b360-cce541dbcceb tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272942, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.209245] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Created folder: Instances in parent group-v653734. [ 688.209936] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 688.209936] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 688.210144] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8e56b525-ef93-4401-8584-a21e89613503 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.235732] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 688.235732] env[69328]: value = "task-3272944" [ 688.235732] env[69328]: _type = "Task" [ 688.235732] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.246820] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272944, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.262523] env[69328]: DEBUG nova.policy [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a16e0ccf09db44f6871d900be4a3ad3a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fdee7fe302e34286b52188a8987d67cf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 688.481814] env[69328]: DEBUG nova.network.neutron [-] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.550039] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4811b5f6-3363-40c1-87f7-1c2bd797b109 tempest-VolumesAssistedSnapshotsTest-1226842710 tempest-VolumesAssistedSnapshotsTest-1226842710-project-admin] Lock "a798c3f2-ccde-488e-8a14-21f4a04f8e12" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 688.679309] env[69328]: DEBUG nova.compute.manager [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Start building block device mappings for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 688.699346] env[69328]: DEBUG nova.network.neutron [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Successfully created port: b4eef5a7-243c-42a1-803f-2405f4009cf7 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 688.707587] env[69328]: DEBUG oslo_vmware.api [None req-35d1b3a3-3800-4ca4-b360-cce541dbcceb tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Task: {'id': task-3272942, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.223444} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.708079] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-35d1b3a3-3800-4ca4-b360-cce541dbcceb tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 688.708490] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-35d1b3a3-3800-4ca4-b360-cce541dbcceb tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 688.708794] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-35d1b3a3-3800-4ca4-b360-cce541dbcceb tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 688.709131] env[69328]: INFO nova.compute.manager [None req-35d1b3a3-3800-4ca4-b360-cce541dbcceb tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Took 1.15 seconds to destroy the instance on the hypervisor. [ 688.709474] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-35d1b3a3-3800-4ca4-b360-cce541dbcceb tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 688.709758] env[69328]: DEBUG nova.compute.manager [-] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 688.709972] env[69328]: DEBUG nova.network.neutron [-] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 688.745527] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272944, 'name': CreateVM_Task, 'duration_secs': 0.465536} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.745736] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 688.752022] env[69328]: DEBUG oslo_concurrency.lockutils [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 688.752022] env[69328]: DEBUG oslo_concurrency.lockutils [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 688.752022] env[69328]: DEBUG oslo_concurrency.lockutils [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 688.752022] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa0e2cd3-e190-45fb-a10b-d7a178912d91 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.755079] env[69328]: DEBUG oslo_vmware.api [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 688.755079] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]524eaa75-1e8e-97cb-09a5-f8ba68f4ef85" [ 688.755079] env[69328]: _type = "Task" [ 688.755079] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.763956] env[69328]: DEBUG oslo_vmware.api [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]524eaa75-1e8e-97cb-09a5-f8ba68f4ef85, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.794995] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4584be76-3ae5-4532-8bd3-ef7b1232f0c5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.805896] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc51cfb5-f3ca-4c9b-8ebc-26ab449f0625 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.837956] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bd80965-0588-45fe-96a1-eacfbd267d7c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.848963] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52f11f9f-f2b9-467a-a88f-0f3cf674db09 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.861947] env[69328]: DEBUG nova.compute.provider_tree [None req-2943fff6-bb40-405e-a549-f2cad6459000 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 688.983113] env[69328]: INFO nova.compute.manager [-] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Took 1.25 seconds to deallocate network for instance. [ 689.191577] env[69328]: DEBUG nova.compute.manager [req-f795a722-4199-4732-9d2e-239111538b78 req-e25e52d8-dfe7-4804-9c5e-deef7bd62e30 service nova] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Received event network-changed-9e189e9a-ecbf-475e-82a4-508c1a0aec74 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 689.191919] env[69328]: DEBUG nova.compute.manager [req-f795a722-4199-4732-9d2e-239111538b78 req-e25e52d8-dfe7-4804-9c5e-deef7bd62e30 service nova] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Refreshing instance network info cache due to event network-changed-9e189e9a-ecbf-475e-82a4-508c1a0aec74. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 689.192259] env[69328]: DEBUG oslo_concurrency.lockutils [req-f795a722-4199-4732-9d2e-239111538b78 req-e25e52d8-dfe7-4804-9c5e-deef7bd62e30 service nova] Acquiring lock "refresh_cache-b0a1441c-81e2-4131-a2ff-f5042d559d9f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 689.196359] env[69328]: DEBUG oslo_concurrency.lockutils [req-f795a722-4199-4732-9d2e-239111538b78 req-e25e52d8-dfe7-4804-9c5e-deef7bd62e30 service nova] Acquired lock "refresh_cache-b0a1441c-81e2-4131-a2ff-f5042d559d9f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 689.196545] env[69328]: DEBUG nova.network.neutron [req-f795a722-4199-4732-9d2e-239111538b78 req-e25e52d8-dfe7-4804-9c5e-deef7bd62e30 service nova] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Refreshing network info cache for port 9e189e9a-ecbf-475e-82a4-508c1a0aec74 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 689.266603] env[69328]: DEBUG oslo_vmware.api [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]524eaa75-1e8e-97cb-09a5-f8ba68f4ef85, 'name': SearchDatastore_Task, 'duration_secs': 0.021386} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.267360] env[69328]: DEBUG oslo_concurrency.lockutils [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 689.267611] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 689.267888] env[69328]: DEBUG oslo_concurrency.lockutils [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 689.268156] env[69328]: DEBUG oslo_concurrency.lockutils [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 689.268476] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) 
mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 689.269276] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7a783392-f720-4a64-82d3-7e86a634cb2b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.278327] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 689.278526] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 689.279350] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf741218-14fa-42ba-af34-df0a5bffefa5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.286140] env[69328]: DEBUG oslo_vmware.api [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 689.286140] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52dfaae3-6767-4d36-52ea-7851a1f26b9f" [ 689.286140] env[69328]: _type = "Task" [ 689.286140] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.295207] env[69328]: DEBUG oslo_vmware.api [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52dfaae3-6767-4d36-52ea-7851a1f26b9f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.365511] env[69328]: DEBUG nova.scheduler.client.report [None req-2943fff6-bb40-405e-a549-f2cad6459000 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 689.490894] env[69328]: DEBUG oslo_concurrency.lockutils [None req-53e9d6fb-ac70-448d-a705-85e6c8f30036 tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 689.630055] env[69328]: DEBUG nova.network.neutron [-] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 689.665432] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4811b5f6-3363-40c1-87f7-1c2bd797b109 tempest-VolumesAssistedSnapshotsTest-1226842710 tempest-VolumesAssistedSnapshotsTest-1226842710-project-admin] Acquiring lock "a798c3f2-ccde-488e-8a14-21f4a04f8e12" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 689.665689] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4811b5f6-3363-40c1-87f7-1c2bd797b109 tempest-VolumesAssistedSnapshotsTest-1226842710 tempest-VolumesAssistedSnapshotsTest-1226842710-project-admin] Lock "a798c3f2-ccde-488e-8a14-21f4a04f8e12" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 689.665924] env[69328]: INFO nova.compute.manager [None req-4811b5f6-3363-40c1-87f7-1c2bd797b109 tempest-VolumesAssistedSnapshotsTest-1226842710 tempest-VolumesAssistedSnapshotsTest-1226842710-project-admin] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Attaching volume b03f8959-d639-4179-af5f-70919bd7f154 to /dev/sdb [ 689.699286] env[69328]: DEBUG nova.compute.manager [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 689.708325] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7e0f4cb-a588-4161-b9c3-c704498f045c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.717756] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24b1df1b-b75d-4951-afe1-42eb02bba604 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.722703] env[69328]: DEBUG nova.virt.hardware [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 689.724026] env[69328]: DEBUG nova.virt.hardware [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 689.724026] env[69328]: DEBUG nova.virt.hardware [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 689.724026] env[69328]: DEBUG nova.virt.hardware [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 689.724026] env[69328]: DEBUG nova.virt.hardware [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 689.724026] env[69328]: DEBUG nova.virt.hardware [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 689.724223] env[69328]: DEBUG nova.virt.hardware [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 689.724257] env[69328]: DEBUG nova.virt.hardware [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 689.725019] env[69328]: DEBUG nova.virt.hardware [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 689.725019] env[69328]: DEBUG nova.virt.hardware [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 689.725019] env[69328]: DEBUG nova.virt.hardware [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 689.725504] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25c33fce-4ff6-4600-b1e2-2f3a6d722224 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.733818] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d80381eb-bdf4-4fd0-85b1-4c95195043cf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.744065] env[69328]: DEBUG nova.virt.block_device [None req-4811b5f6-3363-40c1-87f7-1c2bd797b109 tempest-VolumesAssistedSnapshotsTest-1226842710 tempest-VolumesAssistedSnapshotsTest-1226842710-project-admin] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Updating existing volume attachment record: afe9962f-00d7-4da8-8693-bf60e9dace39 {{(pid=69328) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 689.797014] env[69328]: DEBUG oslo_vmware.api [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52dfaae3-6767-4d36-52ea-7851a1f26b9f, 'name': SearchDatastore_Task, 'duration_secs': 0.009206} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.797848] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-223563a3-2281-4f83-92f0-aee3bda9527f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.803114] env[69328]: DEBUG oslo_vmware.api [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 689.803114] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]523ff3ec-f621-b3a6-e72b-98e705e418fb" [ 689.803114] env[69328]: _type = "Task" [ 689.803114] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.812961] env[69328]: DEBUG oslo_vmware.api [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]523ff3ec-f621-b3a6-e72b-98e705e418fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.873993] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2943fff6-bb40-405e-a549-f2cad6459000 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.209s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 689.879068] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6ad47de2-9a72-45ee-be43-3ebb4501e24d tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.633s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 689.879237] env[69328]: DEBUG nova.objects.instance [None req-6ad47de2-9a72-45ee-be43-3ebb4501e24d tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Lazy-loading 'resources' on Instance uuid f428f9a9-d792-4c1c-b2d4-ea066cc09d67 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 689.910085] env[69328]: INFO nova.scheduler.client.report [None req-2943fff6-bb40-405e-a549-f2cad6459000 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Deleted allocations for instance 26feb2d1-ff64-4a13-af83-b6d5fe4348e1 [ 689.994089] env[69328]: DEBUG nova.network.neutron [req-f795a722-4199-4732-9d2e-239111538b78 req-e25e52d8-dfe7-4804-9c5e-deef7bd62e30 service nova] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Updated VIF entry in instance network info cache for port 9e189e9a-ecbf-475e-82a4-508c1a0aec74. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 689.994487] env[69328]: DEBUG nova.network.neutron [req-f795a722-4199-4732-9d2e-239111538b78 req-e25e52d8-dfe7-4804-9c5e-deef7bd62e30 service nova] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Updating instance_info_cache with network_info: [{"id": "9e189e9a-ecbf-475e-82a4-508c1a0aec74", "address": "fa:16:3e:40:7c:9a", "network": {"id": "ce3bec03-01f9-4ac9-80e4-bfb944c0bb66", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-545997027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87581f423dc64e4fb9fe1d51ebc68597", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e189e9a-ec", "ovs_interfaceid": "9e189e9a-ecbf-475e-82a4-508c1a0aec74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 690.132179] env[69328]: INFO nova.compute.manager [-] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Took 1.42 seconds to deallocate network for instance. [ 690.315207] env[69328]: DEBUG oslo_vmware.api [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]523ff3ec-f621-b3a6-e72b-98e705e418fb, 'name': SearchDatastore_Task, 'duration_secs': 0.010165} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.315623] env[69328]: DEBUG oslo_concurrency.lockutils [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 690.315966] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] b0a1441c-81e2-4131-a2ff-f5042d559d9f/b0a1441c-81e2-4131-a2ff-f5042d559d9f.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 690.316410] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4c10ea26-301b-4fdc-87ee-7b73b218fe10 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.324377] env[69328]: DEBUG oslo_vmware.api [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 690.324377] env[69328]: value = "task-3272948" [ 690.324377] env[69328]: _type = "Task" [ 690.324377] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.333928] env[69328]: DEBUG oslo_vmware.api [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3272948, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.419955] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2943fff6-bb40-405e-a549-f2cad6459000 tempest-ServersAdmin275Test-416590845 tempest-ServersAdmin275Test-416590845-project-member] Lock "26feb2d1-ff64-4a13-af83-b6d5fe4348e1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.348s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 690.498928] env[69328]: DEBUG oslo_concurrency.lockutils [req-f795a722-4199-4732-9d2e-239111538b78 req-e25e52d8-dfe7-4804-9c5e-deef7bd62e30 service nova] Releasing lock "refresh_cache-b0a1441c-81e2-4131-a2ff-f5042d559d9f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 690.499702] env[69328]: DEBUG nova.compute.manager [req-f795a722-4199-4732-9d2e-239111538b78 req-e25e52d8-dfe7-4804-9c5e-deef7bd62e30 service nova] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Received event network-vif-deleted-5deae7a2-4461-4670-a9f6-fda626bae672 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 690.633124] env[69328]: DEBUG nova.network.neutron [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Successfully updated port: b4eef5a7-243c-42a1-803f-2405f4009cf7 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 690.644736] env[69328]: DEBUG oslo_concurrency.lockutils [None req-35d1b3a3-3800-4ca4-b360-cce541dbcceb tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 690.837952] env[69328]: DEBUG oslo_vmware.api [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3272948, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.937844] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-884e5646-8607-4044-841d-44a2c31b3f7d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.945997] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-221a85c6-4116-49d9-bdb9-2548ca2ff3c3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.985225] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c75f739-6c23-421a-b431-c4a87d8628b2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.988589] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82d53df2-b216-41fe-a1c5-1beb18283c27 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.003435] env[69328]: DEBUG nova.compute.provider_tree [None req-6ad47de2-9a72-45ee-be43-3ebb4501e24d tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 691.138181] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Acquiring lock "refresh_cache-b7409a67-c140-436f-9c4e-27dae259f648" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.138442] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Acquired lock "refresh_cache-b7409a67-c140-436f-9c4e-27dae259f648" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 691.138605] env[69328]: DEBUG nova.network.neutron [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 691.223147] env[69328]: DEBUG nova.compute.manager [req-140449da-62ba-4d4a-8084-43ba3d5d72d6 req-40ce89a0-5e00-4e1d-8486-39ba8578f5b7 service nova] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Received event network-vif-deleted-ecb0c91b-f122-4c9d-8c9f-480b703a9915 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 691.223147] env[69328]: DEBUG nova.compute.manager [req-140449da-62ba-4d4a-8084-43ba3d5d72d6 req-40ce89a0-5e00-4e1d-8486-39ba8578f5b7 service nova] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Received event network-vif-plugged-b4eef5a7-243c-42a1-803f-2405f4009cf7 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 691.223376] env[69328]: DEBUG oslo_concurrency.lockutils [req-140449da-62ba-4d4a-8084-43ba3d5d72d6 
req-40ce89a0-5e00-4e1d-8486-39ba8578f5b7 service nova] Acquiring lock "b7409a67-c140-436f-9c4e-27dae259f648-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 691.223731] env[69328]: DEBUG oslo_concurrency.lockutils [req-140449da-62ba-4d4a-8084-43ba3d5d72d6 req-40ce89a0-5e00-4e1d-8486-39ba8578f5b7 service nova] Lock "b7409a67-c140-436f-9c4e-27dae259f648-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 691.225783] env[69328]: DEBUG oslo_concurrency.lockutils [req-140449da-62ba-4d4a-8084-43ba3d5d72d6 req-40ce89a0-5e00-4e1d-8486-39ba8578f5b7 service nova] Lock "b7409a67-c140-436f-9c4e-27dae259f648-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 691.225783] env[69328]: DEBUG nova.compute.manager [req-140449da-62ba-4d4a-8084-43ba3d5d72d6 req-40ce89a0-5e00-4e1d-8486-39ba8578f5b7 service nova] [instance: b7409a67-c140-436f-9c4e-27dae259f648] No waiting events found dispatching network-vif-plugged-b4eef5a7-243c-42a1-803f-2405f4009cf7 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 691.225783] env[69328]: WARNING nova.compute.manager [req-140449da-62ba-4d4a-8084-43ba3d5d72d6 req-40ce89a0-5e00-4e1d-8486-39ba8578f5b7 service nova] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Received unexpected event network-vif-plugged-b4eef5a7-243c-42a1-803f-2405f4009cf7 for instance with vm_state building and task_state spawning. [ 691.225783] env[69328]: DEBUG nova.compute.manager [req-140449da-62ba-4d4a-8084-43ba3d5d72d6 req-40ce89a0-5e00-4e1d-8486-39ba8578f5b7 service nova] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Received event network-changed-b4eef5a7-243c-42a1-803f-2405f4009cf7 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 691.225783] env[69328]: DEBUG nova.compute.manager [req-140449da-62ba-4d4a-8084-43ba3d5d72d6 req-40ce89a0-5e00-4e1d-8486-39ba8578f5b7 service nova] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Refreshing instance network info cache due to event network-changed-b4eef5a7-243c-42a1-803f-2405f4009cf7. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 691.225990] env[69328]: DEBUG oslo_concurrency.lockutils [req-140449da-62ba-4d4a-8084-43ba3d5d72d6 req-40ce89a0-5e00-4e1d-8486-39ba8578f5b7 service nova] Acquiring lock "refresh_cache-b7409a67-c140-436f-9c4e-27dae259f648" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.338141] env[69328]: DEBUG oslo_vmware.api [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3272948, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.565488} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.338141] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] b0a1441c-81e2-4131-a2ff-f5042d559d9f/b0a1441c-81e2-4131-a2ff-f5042d559d9f.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 691.338141] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 691.338141] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7776486f-6d0f-4e28-aaa3-25dd10ecd156 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.347899] env[69328]: DEBUG oslo_vmware.api [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 691.347899] env[69328]: value = "task-3272949" [ 691.347899] env[69328]: _type = "Task" [ 691.347899] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.358739] env[69328]: DEBUG oslo_vmware.api [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3272949, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.507179] env[69328]: DEBUG nova.scheduler.client.report [None req-6ad47de2-9a72-45ee-be43-3ebb4501e24d tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 691.671136] env[69328]: DEBUG nova.network.neutron [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 691.832768] env[69328]: DEBUG nova.network.neutron [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Updating instance_info_cache with network_info: [{"id": "b4eef5a7-243c-42a1-803f-2405f4009cf7", "address": "fa:16:3e:8c:fd:94", "network": {"id": "3e1fd653-8cdb-4d46-b389-68720fefd743", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1990693840-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fdee7fe302e34286b52188a8987d67cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4eef5a7-24", "ovs_interfaceid": "b4eef5a7-243c-42a1-803f-2405f4009cf7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 691.864497] env[69328]: DEBUG oslo_vmware.api [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3272949, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075807} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.864497] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 691.864497] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d868178-974d-4dee-8aa4-ef63d9cc2327 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.894112] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Reconfiguring VM instance instance-0000001b to attach disk [datastore1] b0a1441c-81e2-4131-a2ff-f5042d559d9f/b0a1441c-81e2-4131-a2ff-f5042d559d9f.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 691.894499] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d85c3f9f-f451-4327-9700-1f2c36ee4091 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.915608] env[69328]: DEBUG oslo_vmware.api [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 691.915608] env[69328]: value = "task-3272950" [ 691.915608] env[69328]: _type = "Task" [ 691.915608] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.926533] env[69328]: DEBUG oslo_vmware.api [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3272950, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.013287] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6ad47de2-9a72-45ee-be43-3ebb4501e24d tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.134s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 692.015197] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.887s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 692.016801] env[69328]: INFO nova.compute.claims [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 692.038047] env[69328]: INFO nova.scheduler.client.report [None req-6ad47de2-9a72-45ee-be43-3ebb4501e24d tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Deleted allocations for instance f428f9a9-d792-4c1c-b2d4-ea066cc09d67 [ 692.337905] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Releasing lock "refresh_cache-b7409a67-c140-436f-9c4e-27dae259f648" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 692.338233] env[69328]: DEBUG nova.compute.manager [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Instance network_info: |[{"id": "b4eef5a7-243c-42a1-803f-2405f4009cf7", "address": "fa:16:3e:8c:fd:94", "network": {"id": "3e1fd653-8cdb-4d46-b389-68720fefd743", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1990693840-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fdee7fe302e34286b52188a8987d67cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4eef5a7-24", "ovs_interfaceid": "b4eef5a7-243c-42a1-803f-2405f4009cf7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 692.338659] env[69328]: DEBUG oslo_concurrency.lockutils 
[req-140449da-62ba-4d4a-8084-43ba3d5d72d6 req-40ce89a0-5e00-4e1d-8486-39ba8578f5b7 service nova] Acquired lock "refresh_cache-b7409a67-c140-436f-9c4e-27dae259f648" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 692.338734] env[69328]: DEBUG nova.network.neutron [req-140449da-62ba-4d4a-8084-43ba3d5d72d6 req-40ce89a0-5e00-4e1d-8486-39ba8578f5b7 service nova] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Refreshing network info cache for port b4eef5a7-243c-42a1-803f-2405f4009cf7 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 692.340034] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8c:fd:94', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7150f662-0cf1-44f9-ae14-d70f479649b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b4eef5a7-243c-42a1-803f-2405f4009cf7', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 692.349339] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Creating folder: Project (fdee7fe302e34286b52188a8987d67cf). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 692.353535] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fb686e47-03fe-4142-b8f8-eb3c97be70ca {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.372942] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Created folder: Project (fdee7fe302e34286b52188a8987d67cf) in parent group-v653649. [ 692.372942] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Creating folder: Instances. Parent ref: group-v653740. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 692.372942] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-108686b7-a6eb-49c6-9658-9572532b3a49 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.388024] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Created folder: Instances in parent group-v653740. [ 692.388024] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 692.388024] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 692.388024] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e9f26287-ba8c-414c-bbb9-e5843ea0a012 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.406443] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 692.406443] env[69328]: value = "task-3272954" [ 692.406443] env[69328]: _type = "Task" [ 692.406443] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.415254] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272954, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.425188] env[69328]: DEBUG oslo_vmware.api [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3272950, 'name': ReconfigVM_Task, 'duration_secs': 0.329698} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.425468] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Reconfigured VM instance instance-0000001b to attach disk [datastore1] b0a1441c-81e2-4131-a2ff-f5042d559d9f/b0a1441c-81e2-4131-a2ff-f5042d559d9f.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 692.426169] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d1f6d2d2-9d2a-4471-ac4a-4fea47bb2b55 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.433593] env[69328]: DEBUG oslo_vmware.api [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 692.433593] env[69328]: value = "task-3272955" [ 692.433593] env[69328]: _type = "Task" [ 692.433593] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.446129] env[69328]: DEBUG oslo_vmware.api [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3272955, 'name': Rename_Task} progress is 6%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.548598] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6ad47de2-9a72-45ee-be43-3ebb4501e24d tempest-ServerDiagnosticsV248Test-1430996269 tempest-ServerDiagnosticsV248Test-1430996269-project-member] Lock "f428f9a9-d792-4c1c-b2d4-ea066cc09d67" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.508s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 692.917193] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272954, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.938653] env[69328]: DEBUG nova.network.neutron [req-140449da-62ba-4d4a-8084-43ba3d5d72d6 req-40ce89a0-5e00-4e1d-8486-39ba8578f5b7 service nova] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Updated VIF entry in instance network info cache for port b4eef5a7-243c-42a1-803f-2405f4009cf7. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 692.939060] env[69328]: DEBUG nova.network.neutron [req-140449da-62ba-4d4a-8084-43ba3d5d72d6 req-40ce89a0-5e00-4e1d-8486-39ba8578f5b7 service nova] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Updating instance_info_cache with network_info: [{"id": "b4eef5a7-243c-42a1-803f-2405f4009cf7", "address": "fa:16:3e:8c:fd:94", "network": {"id": "3e1fd653-8cdb-4d46-b389-68720fefd743", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1990693840-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fdee7fe302e34286b52188a8987d67cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4eef5a7-24", "ovs_interfaceid": "b4eef5a7-243c-42a1-803f-2405f4009cf7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.949020] env[69328]: DEBUG oslo_vmware.api [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3272955, 'name': Rename_Task, 'duration_secs': 0.152517} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.950212] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 692.950520] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2dc384cc-e475-4f66-b515-2baaf44740eb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.963659] env[69328]: DEBUG oslo_vmware.api [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 692.963659] env[69328]: value = "task-3272956" [ 692.963659] env[69328]: _type = "Task" [ 692.963659] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.981278] env[69328]: DEBUG oslo_vmware.api [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3272956, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.417880] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272954, 'name': CreateVM_Task, 'duration_secs': 0.5193} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.420756] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 693.421732] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.421906] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 693.422415] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 693.422529] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4061242-67e8-44e7-b9e6-87ba1fd55af2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.427579] env[69328]: DEBUG 
oslo_vmware.api [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Waiting for the task: (returnval){ [ 693.427579] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52173e06-6cd6-a003-f855-eb3bbed5b483" [ 693.427579] env[69328]: _type = "Task" [ 693.427579] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.435964] env[69328]: DEBUG oslo_vmware.api [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52173e06-6cd6-a003-f855-eb3bbed5b483, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.446811] env[69328]: DEBUG oslo_concurrency.lockutils [req-140449da-62ba-4d4a-8084-43ba3d5d72d6 req-40ce89a0-5e00-4e1d-8486-39ba8578f5b7 service nova] Releasing lock "refresh_cache-b7409a67-c140-436f-9c4e-27dae259f648" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 693.476501] env[69328]: DEBUG oslo_vmware.api [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3272956, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.569345] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71ba5fd5-84f2-4555-a9bf-adfea76b9d0e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.577144] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2db74d2-a860-407e-91d0-6fc0a1bf4e7d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.613132] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d92bab5f-f083-44bd-8672-258738417af3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.621531] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce5be4b8-d61c-437a-a1a4-29be8697bc8b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.638321] env[69328]: DEBUG nova.compute.provider_tree [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 693.943629] env[69328]: DEBUG oslo_vmware.api [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52173e06-6cd6-a003-f855-eb3bbed5b483, 'name': SearchDatastore_Task, 'duration_secs': 0.017516} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.944121] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 693.944460] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 693.944875] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.945141] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 693.945410] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 693.945794] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fd1683ad-d497-4aa8-8442-bb04904f723d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.959105] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 693.959105] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 693.959105] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-705aecf9-dc48-48d4-b3fd-3ed364db88c5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.968909] env[69328]: DEBUG oslo_vmware.api [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Waiting for the task: (returnval){ [ 693.968909] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]525ea368-dbc9-73d3-998d-4cdccc5db171" [ 693.968909] env[69328]: _type = "Task" [ 693.968909] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.981830] env[69328]: DEBUG oslo_vmware.api [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3272956, 'name': PowerOnVM_Task, 'duration_secs': 0.667412} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.985262] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 693.985677] env[69328]: INFO nova.compute.manager [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Took 8.13 seconds to spawn the instance on the hypervisor. [ 693.986026] env[69328]: DEBUG nova.compute.manager [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 693.986457] env[69328]: DEBUG oslo_vmware.api [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]525ea368-dbc9-73d3-998d-4cdccc5db171, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.987374] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a748bea-dd8e-4a3a-adb7-f9863f4f9e55 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.141616] env[69328]: DEBUG nova.scheduler.client.report [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 694.298725] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-4811b5f6-3363-40c1-87f7-1c2bd797b109 tempest-VolumesAssistedSnapshotsTest-1226842710 tempest-VolumesAssistedSnapshotsTest-1226842710-project-admin] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Volume attach. Driver type: vmdk {{(pid=69328) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 694.298943] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-4811b5f6-3363-40c1-87f7-1c2bd797b109 tempest-VolumesAssistedSnapshotsTest-1226842710 tempest-VolumesAssistedSnapshotsTest-1226842710-project-admin] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653739', 'volume_id': 'b03f8959-d639-4179-af5f-70919bd7f154', 'name': 'volume-b03f8959-d639-4179-af5f-70919bd7f154', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a798c3f2-ccde-488e-8a14-21f4a04f8e12', 'attached_at': '', 'detached_at': '', 'volume_id': 'b03f8959-d639-4179-af5f-70919bd7f154', 'serial': 'b03f8959-d639-4179-af5f-70919bd7f154'} {{(pid=69328) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 694.299846] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aef5273d-fa61-48e1-a0e0-6c1bcfd608e6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.331374] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdf62724-234c-4152-add1-831c75cd1ee8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.361811] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-4811b5f6-3363-40c1-87f7-1c2bd797b109 tempest-VolumesAssistedSnapshotsTest-1226842710 tempest-VolumesAssistedSnapshotsTest-1226842710-project-admin] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Reconfiguring VM instance instance-0000000b to attach disk [datastore1] volume-b03f8959-d639-4179-af5f-70919bd7f154/volume-b03f8959-d639-4179-af5f-70919bd7f154.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 694.362229] env[69328]: DEBUG oslo_vmware.service 
[-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c900fa37-070d-4ec1-b663-60f96983f56b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.380910] env[69328]: DEBUG oslo_vmware.api [None req-4811b5f6-3363-40c1-87f7-1c2bd797b109 tempest-VolumesAssistedSnapshotsTest-1226842710 tempest-VolumesAssistedSnapshotsTest-1226842710-project-admin] Waiting for the task: (returnval){ [ 694.380910] env[69328]: value = "task-3272957" [ 694.380910] env[69328]: _type = "Task" [ 694.380910] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.389097] env[69328]: DEBUG oslo_vmware.api [None req-4811b5f6-3363-40c1-87f7-1c2bd797b109 tempest-VolumesAssistedSnapshotsTest-1226842710 tempest-VolumesAssistedSnapshotsTest-1226842710-project-admin] Task: {'id': task-3272957, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.484820] env[69328]: DEBUG oslo_vmware.api [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]525ea368-dbc9-73d3-998d-4cdccc5db171, 'name': SearchDatastore_Task, 'duration_secs': 0.015764} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.486059] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c295a0e0-6412-4fb6-b5f0-5b6a4c4f440e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.494123] env[69328]: DEBUG oslo_vmware.api [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Waiting for the task: (returnval){ [ 694.494123] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e063a7-a61a-46d5-6617-cf72f6f4437f" [ 694.494123] env[69328]: _type = "Task" [ 694.494123] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.505479] env[69328]: DEBUG oslo_vmware.api [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e063a7-a61a-46d5-6617-cf72f6f4437f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.507819] env[69328]: INFO nova.compute.manager [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Took 48.36 seconds to build instance. 
[ 694.647144] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.632s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 694.647747] env[69328]: DEBUG nova.compute.manager [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 694.651586] env[69328]: DEBUG oslo_concurrency.lockutils [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.178s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 694.653491] env[69328]: INFO nova.compute.claims [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 694.891745] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Acquiring lock "f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 694.892009] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Lock "f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 694.896730] env[69328]: DEBUG oslo_vmware.api [None req-4811b5f6-3363-40c1-87f7-1c2bd797b109 tempest-VolumesAssistedSnapshotsTest-1226842710 tempest-VolumesAssistedSnapshotsTest-1226842710-project-admin] Task: {'id': task-3272957, 'name': ReconfigVM_Task, 'duration_secs': 0.431608} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.896994] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-4811b5f6-3363-40c1-87f7-1c2bd797b109 tempest-VolumesAssistedSnapshotsTest-1226842710 tempest-VolumesAssistedSnapshotsTest-1226842710-project-admin] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Reconfigured VM instance instance-0000000b to attach disk [datastore1] volume-b03f8959-d639-4179-af5f-70919bd7f154/volume-b03f8959-d639-4179-af5f-70919bd7f154.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 694.902338] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-09d82aef-8624-40d7-9f49-76fd2a8e8326 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.924395] env[69328]: DEBUG oslo_vmware.api [None req-4811b5f6-3363-40c1-87f7-1c2bd797b109 tempest-VolumesAssistedSnapshotsTest-1226842710 tempest-VolumesAssistedSnapshotsTest-1226842710-project-admin] Waiting for the task: (returnval){ [ 694.924395] env[69328]: value = "task-3272958" [ 694.924395] env[69328]: _type = "Task" [ 694.924395] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.935410] env[69328]: DEBUG oslo_vmware.api [None req-4811b5f6-3363-40c1-87f7-1c2bd797b109 tempest-VolumesAssistedSnapshotsTest-1226842710 tempest-VolumesAssistedSnapshotsTest-1226842710-project-admin] Task: {'id': task-3272958, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.004373] env[69328]: DEBUG oslo_vmware.api [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e063a7-a61a-46d5-6617-cf72f6f4437f, 'name': SearchDatastore_Task, 'duration_secs': 0.014009} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.004373] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 695.004373] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] b7409a67-c140-436f-9c4e-27dae259f648/b7409a67-c140-436f-9c4e-27dae259f648.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 695.004373] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c3686fcf-89b2-4217-98ef-d6fd1e13f07b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.010802] env[69328]: DEBUG oslo_concurrency.lockutils [None req-41d24cd9-a808-496d-9e49-77ef2cc4326b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "b0a1441c-81e2-4131-a2ff-f5042d559d9f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.626s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 695.022230] env[69328]: DEBUG oslo_vmware.api [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Waiting for the task: (returnval){ [ 695.022230] env[69328]: value = "task-3272959" [ 695.022230] env[69328]: _type = "Task" [ 695.022230] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.035294] env[69328]: DEBUG oslo_vmware.api [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Task: {'id': task-3272959, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.159273] env[69328]: DEBUG nova.compute.utils [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 695.164322] env[69328]: DEBUG nova.compute.manager [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 695.164520] env[69328]: DEBUG nova.network.neutron [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 695.242282] env[69328]: DEBUG nova.policy [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1685bb9a09d84a7a92306c64f0e5895e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '75d5853e3c724d02bacfa75173e38ab3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 695.440591] env[69328]: DEBUG oslo_vmware.api [None req-4811b5f6-3363-40c1-87f7-1c2bd797b109 tempest-VolumesAssistedSnapshotsTest-1226842710 tempest-VolumesAssistedSnapshotsTest-1226842710-project-admin] Task: {'id': task-3272958, 'name': ReconfigVM_Task, 'duration_secs': 0.163337} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.441133] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-4811b5f6-3363-40c1-87f7-1c2bd797b109 tempest-VolumesAssistedSnapshotsTest-1226842710 tempest-VolumesAssistedSnapshotsTest-1226842710-project-admin] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653739', 'volume_id': 'b03f8959-d639-4179-af5f-70919bd7f154', 'name': 'volume-b03f8959-d639-4179-af5f-70919bd7f154', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a798c3f2-ccde-488e-8a14-21f4a04f8e12', 'attached_at': '', 'detached_at': '', 'volume_id': 'b03f8959-d639-4179-af5f-70919bd7f154', 'serial': 'b03f8959-d639-4179-af5f-70919bd7f154'} {{(pid=69328) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 695.462647] env[69328]: DEBUG oslo_vmware.rw_handles [None req-de24d6d0-9871-4285-8906-8b9e68839917 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5252e18b-15dd-06f9-6fae-70e581532381/disk-0.vmdk. {{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 695.465165] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d11a972-4c29-4d4f-a548-4ef4e7b97653 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.474182] env[69328]: DEBUG oslo_vmware.rw_handles [None req-de24d6d0-9871-4285-8906-8b9e68839917 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5252e18b-15dd-06f9-6fae-70e581532381/disk-0.vmdk is in state: ready. 
{{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 695.474925] env[69328]: ERROR oslo_vmware.rw_handles [None req-de24d6d0-9871-4285-8906-8b9e68839917 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5252e18b-15dd-06f9-6fae-70e581532381/disk-0.vmdk due to incomplete transfer. [ 695.474925] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-7894f283-ef24-4cb1-bb13-6d7afc689e75 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.487021] env[69328]: DEBUG oslo_vmware.rw_handles [None req-de24d6d0-9871-4285-8906-8b9e68839917 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5252e18b-15dd-06f9-6fae-70e581532381/disk-0.vmdk. {{(pid=69328) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 695.487021] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-de24d6d0-9871-4285-8906-8b9e68839917 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Uploaded image d19c5a34-425c-40c8-a06e-435c94382c66 to the Glance image server {{(pid=69328) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 695.489059] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-de24d6d0-9871-4285-8906-8b9e68839917 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Destroying the VM {{(pid=69328) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 695.490036] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-366c5fc5-28a3-411a-90c3-aff1982737be {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.501178] env[69328]: DEBUG oslo_vmware.api [None req-de24d6d0-9871-4285-8906-8b9e68839917 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Waiting for the task: (returnval){ [ 695.501178] env[69328]: value = "task-3272960" [ 695.501178] env[69328]: _type = "Task" [ 695.501178] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.524732] env[69328]: DEBUG nova.compute.manager [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 695.531542] env[69328]: DEBUG oslo_vmware.api [None req-de24d6d0-9871-4285-8906-8b9e68839917 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272960, 'name': Destroy_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.541805] env[69328]: DEBUG oslo_vmware.api [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Task: {'id': task-3272959, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.664877] env[69328]: DEBUG nova.compute.manager [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 695.851186] env[69328]: DEBUG nova.network.neutron [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Successfully created port: 67a5c2b8-cfa7-474e-91f4-f5b16fab46ca {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 695.920441] env[69328]: DEBUG nova.compute.manager [req-8757edf1-02bf-495b-888b-8e362d357d5a req-85cc2b89-5d10-43e3-b5b0-0da6c289ce81 service nova] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Received event network-changed-9e189e9a-ecbf-475e-82a4-508c1a0aec74 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 695.920504] env[69328]: DEBUG nova.compute.manager [req-8757edf1-02bf-495b-888b-8e362d357d5a req-85cc2b89-5d10-43e3-b5b0-0da6c289ce81 service nova] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Refreshing instance network info cache due to event network-changed-9e189e9a-ecbf-475e-82a4-508c1a0aec74. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 695.920726] env[69328]: DEBUG oslo_concurrency.lockutils [req-8757edf1-02bf-495b-888b-8e362d357d5a req-85cc2b89-5d10-43e3-b5b0-0da6c289ce81 service nova] Acquiring lock "refresh_cache-b0a1441c-81e2-4131-a2ff-f5042d559d9f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.920865] env[69328]: DEBUG oslo_concurrency.lockutils [req-8757edf1-02bf-495b-888b-8e362d357d5a req-85cc2b89-5d10-43e3-b5b0-0da6c289ce81 service nova] Acquired lock "refresh_cache-b0a1441c-81e2-4131-a2ff-f5042d559d9f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 695.921036] env[69328]: DEBUG nova.network.neutron [req-8757edf1-02bf-495b-888b-8e362d357d5a req-85cc2b89-5d10-43e3-b5b0-0da6c289ce81 service nova] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Refreshing network info cache for port 9e189e9a-ecbf-475e-82a4-508c1a0aec74 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 696.019619] env[69328]: DEBUG oslo_vmware.api [None req-de24d6d0-9871-4285-8906-8b9e68839917 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272960, 'name': Destroy_Task, 'duration_secs': 0.359206} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.022666] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-de24d6d0-9871-4285-8906-8b9e68839917 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Destroyed the VM [ 696.022913] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-de24d6d0-9871-4285-8906-8b9e68839917 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Deleting Snapshot of the VM instance {{(pid=69328) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 696.023542] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-6b21fffc-fa21-41ee-a397-74fb9dbef156 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.034559] env[69328]: DEBUG oslo_vmware.api [None req-de24d6d0-9871-4285-8906-8b9e68839917 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Waiting for the task: (returnval){ [ 696.034559] env[69328]: value = "task-3272961" [ 696.034559] env[69328]: _type = "Task" [ 696.034559] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.047414] env[69328]: DEBUG oslo_vmware.api [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Task: {'id': task-3272959, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.052634] env[69328]: DEBUG oslo_vmware.api [None req-de24d6d0-9871-4285-8906-8b9e68839917 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272961, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.053712] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 696.299457] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1f69444-72de-4c60-86a2-f51c70270902 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.307204] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d752ddf-93cd-40f3-ba8a-56119312097f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.342020] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3406fd92-083c-437b-8518-a072d590e0d7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.352811] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec9b8fcb-3026-4e3a-9982-25cd0027af84 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.368652] env[69328]: DEBUG nova.compute.provider_tree [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 696.508956] env[69328]: DEBUG nova.objects.instance [None req-4811b5f6-3363-40c1-87f7-1c2bd797b109 tempest-VolumesAssistedSnapshotsTest-1226842710 tempest-VolumesAssistedSnapshotsTest-1226842710-project-admin] Lazy-loading 'flavor' on Instance uuid a798c3f2-ccde-488e-8a14-21f4a04f8e12 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 696.543928] env[69328]: DEBUG oslo_vmware.api [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Task: {'id': task-3272959, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.561928] env[69328]: DEBUG oslo_vmware.api [None req-de24d6d0-9871-4285-8906-8b9e68839917 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272961, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.685775] env[69328]: DEBUG nova.compute.manager [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 696.713579] env[69328]: DEBUG nova.network.neutron [req-8757edf1-02bf-495b-888b-8e362d357d5a req-85cc2b89-5d10-43e3-b5b0-0da6c289ce81 service nova] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Updated VIF entry in instance network info cache for port 9e189e9a-ecbf-475e-82a4-508c1a0aec74. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 696.713953] env[69328]: DEBUG nova.network.neutron [req-8757edf1-02bf-495b-888b-8e362d357d5a req-85cc2b89-5d10-43e3-b5b0-0da6c289ce81 service nova] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Updating instance_info_cache with network_info: [{"id": "9e189e9a-ecbf-475e-82a4-508c1a0aec74", "address": "fa:16:3e:40:7c:9a", "network": {"id": "ce3bec03-01f9-4ac9-80e4-bfb944c0bb66", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-545997027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.234", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87581f423dc64e4fb9fe1d51ebc68597", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e189e9a-ec", "ovs_interfaceid": "9e189e9a-ecbf-475e-82a4-508c1a0aec74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 696.717134] env[69328]: DEBUG nova.virt.hardware [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 
696.717365] env[69328]: DEBUG nova.virt.hardware [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 696.717518] env[69328]: DEBUG nova.virt.hardware [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 696.717696] env[69328]: DEBUG nova.virt.hardware [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 696.717840] env[69328]: DEBUG nova.virt.hardware [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 696.717992] env[69328]: DEBUG nova.virt.hardware [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 696.718234] env[69328]: DEBUG nova.virt.hardware [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 696.718391] env[69328]: DEBUG nova.virt.hardware [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 696.718592] env[69328]: DEBUG nova.virt.hardware [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 696.718766] env[69328]: DEBUG nova.virt.hardware [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 696.718936] env[69328]: DEBUG nova.virt.hardware [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 696.720137] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3323ba3-e0c9-4509-8e22-f966d0488288 {{(pid=69328) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.728759] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1725fc8f-53cf-433e-a4ed-867d50b1edb7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.894405] env[69328]: ERROR nova.scheduler.client.report [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [req-95832d4c-4b31-468e-9585-b2989aa88890] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-95832d4c-4b31-468e-9585-b2989aa88890"}]} [ 696.914348] env[69328]: DEBUG nova.scheduler.client.report [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Refreshing inventories for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 696.928205] env[69328]: DEBUG nova.scheduler.client.report [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Updating ProviderTree inventory for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 696.928380] env[69328]: DEBUG nova.compute.provider_tree [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 696.940074] env[69328]: DEBUG nova.scheduler.client.report [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Refreshing 
aggregate associations for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e, aggregates: None {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 696.957025] env[69328]: DEBUG nova.scheduler.client.report [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Refreshing trait associations for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 697.014848] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4811b5f6-3363-40c1-87f7-1c2bd797b109 tempest-VolumesAssistedSnapshotsTest-1226842710 tempest-VolumesAssistedSnapshotsTest-1226842710-project-admin] Lock "a798c3f2-ccde-488e-8a14-21f4a04f8e12" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.349s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 697.058312] env[69328]: DEBUG oslo_vmware.api [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Task: {'id': task-3272959, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.634837} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.058312] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] b7409a67-c140-436f-9c4e-27dae259f648/b7409a67-c140-436f-9c4e-27dae259f648.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 697.058312] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 697.058312] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5fdf7140-d730-4a16-884b-a37d4f6850bd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.070104] env[69328]: DEBUG oslo_vmware.api [None req-de24d6d0-9871-4285-8906-8b9e68839917 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272961, 'name': RemoveSnapshot_Task, 'duration_secs': 0.55613} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.071451] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-de24d6d0-9871-4285-8906-8b9e68839917 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Deleted Snapshot of the VM instance {{(pid=69328) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 697.071451] env[69328]: INFO nova.compute.manager [None req-de24d6d0-9871-4285-8906-8b9e68839917 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Took 14.38 seconds to snapshot the instance on the hypervisor. [ 697.076240] env[69328]: DEBUG oslo_vmware.api [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Waiting for the task: (returnval){ [ 697.076240] env[69328]: value = "task-3272962" [ 697.076240] env[69328]: _type = "Task" [ 697.076240] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.090546] env[69328]: DEBUG oslo_vmware.api [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Task: {'id': task-3272962, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.184794] env[69328]: DEBUG oslo_concurrency.lockutils [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Acquiring lock "8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.184970] env[69328]: DEBUG oslo_concurrency.lockutils [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Lock "8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 697.224152] env[69328]: DEBUG oslo_concurrency.lockutils [req-8757edf1-02bf-495b-888b-8e362d357d5a req-85cc2b89-5d10-43e3-b5b0-0da6c289ce81 service nova] Releasing lock "refresh_cache-b0a1441c-81e2-4131-a2ff-f5042d559d9f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 697.375034] env[69328]: DEBUG nova.compute.manager [req-0e87000e-3dc5-41e9-baee-7708cc2990f2 req-2d68e2b3-2b05-4dd8-94b6-537bbe8839b2 service nova] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Received event network-vif-plugged-67a5c2b8-cfa7-474e-91f4-f5b16fab46ca {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 697.375259] env[69328]: DEBUG oslo_concurrency.lockutils [req-0e87000e-3dc5-41e9-baee-7708cc2990f2 req-2d68e2b3-2b05-4dd8-94b6-537bbe8839b2 service nova] Acquiring lock "b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.375471] env[69328]: DEBUG oslo_concurrency.lockutils [req-0e87000e-3dc5-41e9-baee-7708cc2990f2 req-2d68e2b3-2b05-4dd8-94b6-537bbe8839b2 service nova] Lock "b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 697.375638] env[69328]: DEBUG oslo_concurrency.lockutils [req-0e87000e-3dc5-41e9-baee-7708cc2990f2 req-2d68e2b3-2b05-4dd8-94b6-537bbe8839b2 service nova] Lock "b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 697.375800] env[69328]: DEBUG nova.compute.manager [req-0e87000e-3dc5-41e9-baee-7708cc2990f2 req-2d68e2b3-2b05-4dd8-94b6-537bbe8839b2 service nova] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] No waiting events found dispatching network-vif-plugged-67a5c2b8-cfa7-474e-91f4-f5b16fab46ca {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 697.375954] env[69328]: WARNING nova.compute.manager [req-0e87000e-3dc5-41e9-baee-7708cc2990f2 req-2d68e2b3-2b05-4dd8-94b6-537bbe8839b2 service nova] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Received unexpected event network-vif-plugged-67a5c2b8-cfa7-474e-91f4-f5b16fab46ca for instance with vm_state building and task_state spawning. [ 697.475679] env[69328]: DEBUG nova.network.neutron [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Successfully updated port: 67a5c2b8-cfa7-474e-91f4-f5b16fab46ca {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 697.509610] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2d28d8f-2c8f-465e-8c02-fe1ee0d1aaaa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.518179] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dd45172-f377-400b-936e-1a6acd868d17 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.550150] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b2af857-3b01-4b08-968b-ab0667fc9fcd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.558122] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10c1a865-b63b-45f4-8665-55cb3e7ce2d0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.573040] env[69328]: DEBUG nova.compute.provider_tree [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 697.587576] env[69328]: DEBUG oslo_vmware.api [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Task: {'id': task-3272962, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073422} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.587776] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 697.588530] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcc6a9c6-9bc6-45cc-a9ee-8c21cd871187 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.610917] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Reconfiguring VM instance instance-0000001c to attach disk [datastore1] b7409a67-c140-436f-9c4e-27dae259f648/b7409a67-c140-436f-9c4e-27dae259f648.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 697.611370] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1768d01b-0268-4492-964e-a9a8e240a17e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.630437] env[69328]: DEBUG oslo_vmware.api [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Waiting for the task: (returnval){ [ 697.630437] env[69328]: value = "task-3272963" [ 697.630437] env[69328]: _type = "Task" [ 697.630437] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.637892] env[69328]: DEBUG oslo_vmware.api [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Task: {'id': task-3272963, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.668268] env[69328]: DEBUG oslo_concurrency.lockutils [None req-660a892c-6ba5-4b1f-a71a-8a5d78d335cf tempest-VolumesAssistedSnapshotsTest-1226842710 tempest-VolumesAssistedSnapshotsTest-1226842710-project-admin] Acquiring lock "a798c3f2-ccde-488e-8a14-21f4a04f8e12" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.668268] env[69328]: DEBUG oslo_concurrency.lockutils [None req-660a892c-6ba5-4b1f-a71a-8a5d78d335cf tempest-VolumesAssistedSnapshotsTest-1226842710 tempest-VolumesAssistedSnapshotsTest-1226842710-project-admin] Lock "a798c3f2-ccde-488e-8a14-21f4a04f8e12" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 697.979302] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "refresh_cache-b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.979777] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquired lock "refresh_cache-b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 697.979777] env[69328]: DEBUG nova.network.neutron [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 698.078026] env[69328]: DEBUG nova.scheduler.client.report [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 698.142147] env[69328]: DEBUG oslo_vmware.api [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Task: {'id': task-3272963, 'name': ReconfigVM_Task, 'duration_secs': 0.270679} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.142147] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Reconfigured VM instance instance-0000001c to attach disk [datastore1] b7409a67-c140-436f-9c4e-27dae259f648/b7409a67-c140-436f-9c4e-27dae259f648.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 698.142582] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-69723f62-a5f4-44ee-9611-c4f909e5fd7d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.149569] env[69328]: DEBUG oslo_vmware.api [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Waiting for the task: (returnval){ [ 698.149569] env[69328]: value = "task-3272964" [ 698.149569] env[69328]: _type = "Task" [ 698.149569] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.157717] env[69328]: DEBUG oslo_vmware.api [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Task: {'id': task-3272964, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.171306] env[69328]: INFO nova.compute.manager [None req-660a892c-6ba5-4b1f-a71a-8a5d78d335cf tempest-VolumesAssistedSnapshotsTest-1226842710 tempest-VolumesAssistedSnapshotsTest-1226842710-project-admin] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Detaching volume b03f8959-d639-4179-af5f-70919bd7f154 [ 698.212382] env[69328]: INFO nova.virt.block_device [None req-660a892c-6ba5-4b1f-a71a-8a5d78d335cf tempest-VolumesAssistedSnapshotsTest-1226842710 tempest-VolumesAssistedSnapshotsTest-1226842710-project-admin] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Attempting to driver detach volume b03f8959-d639-4179-af5f-70919bd7f154 from mountpoint /dev/sdb [ 698.212621] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-660a892c-6ba5-4b1f-a71a-8a5d78d335cf tempest-VolumesAssistedSnapshotsTest-1226842710 tempest-VolumesAssistedSnapshotsTest-1226842710-project-admin] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Volume detach. 
Driver type: vmdk {{(pid=69328) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 698.212814] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-660a892c-6ba5-4b1f-a71a-8a5d78d335cf tempest-VolumesAssistedSnapshotsTest-1226842710 tempest-VolumesAssistedSnapshotsTest-1226842710-project-admin] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653739', 'volume_id': 'b03f8959-d639-4179-af5f-70919bd7f154', 'name': 'volume-b03f8959-d639-4179-af5f-70919bd7f154', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a798c3f2-ccde-488e-8a14-21f4a04f8e12', 'attached_at': '', 'detached_at': '', 'volume_id': 'b03f8959-d639-4179-af5f-70919bd7f154', 'serial': 'b03f8959-d639-4179-af5f-70919bd7f154'} {{(pid=69328) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 698.214136] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f95110f-fbcd-4807-bce3-268c6e28f488 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.236316] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-585a69aa-5582-452b-bda5-02b6358b6475 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.244256] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92e41a1b-6737-434c-b1b5-ec8bfbf37bdb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.268654] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9fe3091-beb3-45dc-8935-0e4ec1497ddd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.286028] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-660a892c-6ba5-4b1f-a71a-8a5d78d335cf tempest-VolumesAssistedSnapshotsTest-1226842710 tempest-VolumesAssistedSnapshotsTest-1226842710-project-admin] The volume has not been displaced from its original location: [datastore1] volume-b03f8959-d639-4179-af5f-70919bd7f154/volume-b03f8959-d639-4179-af5f-70919bd7f154.vmdk. No consolidation needed. 
{{(pid=69328) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 698.292806] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-660a892c-6ba5-4b1f-a71a-8a5d78d335cf tempest-VolumesAssistedSnapshotsTest-1226842710 tempest-VolumesAssistedSnapshotsTest-1226842710-project-admin] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Reconfiguring VM instance instance-0000000b to detach disk 2001 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 698.293138] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2b6ba46a-f2cb-43e5-a55e-96e0d9df5f1b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.311234] env[69328]: DEBUG oslo_vmware.api [None req-660a892c-6ba5-4b1f-a71a-8a5d78d335cf tempest-VolumesAssistedSnapshotsTest-1226842710 tempest-VolumesAssistedSnapshotsTest-1226842710-project-admin] Waiting for the task: (returnval){ [ 698.311234] env[69328]: value = "task-3272965" [ 698.311234] env[69328]: _type = "Task" [ 698.311234] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.319322] env[69328]: DEBUG oslo_vmware.api [None req-660a892c-6ba5-4b1f-a71a-8a5d78d335cf tempest-VolumesAssistedSnapshotsTest-1226842710 tempest-VolumesAssistedSnapshotsTest-1226842710-project-admin] Task: {'id': task-3272965, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.509490] env[69328]: DEBUG nova.network.neutron [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 698.581414] env[69328]: DEBUG oslo_concurrency.lockutils [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.930s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 698.581960] env[69328]: DEBUG nova.compute.manager [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 698.584694] env[69328]: DEBUG oslo_concurrency.lockutils [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.791s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 698.586286] env[69328]: INFO nova.compute.claims [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 698.640141] env[69328]: DEBUG nova.network.neutron [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Updating instance_info_cache with network_info: [{"id": "67a5c2b8-cfa7-474e-91f4-f5b16fab46ca", "address": "fa:16:3e:1e:59:e3", "network": {"id": "e2ab6957-2c9d-4b95-91bd-5a62d9a284ba", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-967470666-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75d5853e3c724d02bacfa75173e38ab3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67a5c2b8-cf", "ovs_interfaceid": "67a5c2b8-cfa7-474e-91f4-f5b16fab46ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.659762] env[69328]: DEBUG oslo_vmware.api [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Task: {'id': task-3272964, 'name': Rename_Task, 'duration_secs': 0.134656} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.660036] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 698.660281] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e1a7e3b3-17f4-4438-b6a0-a066d98bdf95 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.666378] env[69328]: DEBUG oslo_vmware.api [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Waiting for the task: (returnval){ [ 698.666378] env[69328]: value = "task-3272966" [ 698.666378] env[69328]: _type = "Task" [ 698.666378] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.673789] env[69328]: DEBUG oslo_vmware.api [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Task: {'id': task-3272966, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.821374] env[69328]: DEBUG oslo_vmware.api [None req-660a892c-6ba5-4b1f-a71a-8a5d78d335cf tempest-VolumesAssistedSnapshotsTest-1226842710 tempest-VolumesAssistedSnapshotsTest-1226842710-project-admin] Task: {'id': task-3272965, 'name': ReconfigVM_Task, 'duration_secs': 0.219782} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.821740] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-660a892c-6ba5-4b1f-a71a-8a5d78d335cf tempest-VolumesAssistedSnapshotsTest-1226842710 tempest-VolumesAssistedSnapshotsTest-1226842710-project-admin] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Reconfigured VM instance instance-0000000b to detach disk 2001 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 698.826274] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b5768de7-9464-4c3c-acf2-011ebc5f4061 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.840985] env[69328]: DEBUG oslo_vmware.api [None req-660a892c-6ba5-4b1f-a71a-8a5d78d335cf tempest-VolumesAssistedSnapshotsTest-1226842710 tempest-VolumesAssistedSnapshotsTest-1226842710-project-admin] Waiting for the task: (returnval){ [ 698.840985] env[69328]: value = "task-3272967" [ 698.840985] env[69328]: _type = "Task" [ 698.840985] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.848755] env[69328]: DEBUG oslo_vmware.api [None req-660a892c-6ba5-4b1f-a71a-8a5d78d335cf tempest-VolumesAssistedSnapshotsTest-1226842710 tempest-VolumesAssistedSnapshotsTest-1226842710-project-admin] Task: {'id': task-3272967, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.093443] env[69328]: DEBUG nova.compute.utils [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 699.095276] env[69328]: DEBUG nova.compute.manager [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 699.095495] env[69328]: DEBUG nova.network.neutron [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 699.120900] env[69328]: DEBUG nova.compute.manager [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 699.121800] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce15a064-865b-4608-a828-22e40762472f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.143252] env[69328]: DEBUG nova.policy [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7fbdc3e734be4369884cfcf483b2678f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4e8bc0d144f44546bd21fb04277c998c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 699.145277] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Releasing lock "refresh_cache-b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 699.145674] env[69328]: DEBUG nova.compute.manager [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Instance network_info: |[{"id": "67a5c2b8-cfa7-474e-91f4-f5b16fab46ca", "address": "fa:16:3e:1e:59:e3", "network": {"id": "e2ab6957-2c9d-4b95-91bd-5a62d9a284ba", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-967470666-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": 
"gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75d5853e3c724d02bacfa75173e38ab3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67a5c2b8-cf", "ovs_interfaceid": "67a5c2b8-cfa7-474e-91f4-f5b16fab46ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 699.146407] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:59:e3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'abcf0d10-3f3f-45dc-923e-1c78766e2dad', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '67a5c2b8-cfa7-474e-91f4-f5b16fab46ca', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 699.154132] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Creating folder: Project (75d5853e3c724d02bacfa75173e38ab3). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 699.154945] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b8a6e784-549f-4ad6-b49f-cca5acce87a1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.167776] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Created folder: Project (75d5853e3c724d02bacfa75173e38ab3) in parent group-v653649. [ 699.167972] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Creating folder: Instances. Parent ref: group-v653743. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 699.171152] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b3147315-a4d8-4561-b701-fae545f636c9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.177843] env[69328]: DEBUG oslo_vmware.api [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Task: {'id': task-3272966, 'name': PowerOnVM_Task, 'duration_secs': 0.441018} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.178135] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 699.178344] env[69328]: INFO nova.compute.manager [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Took 9.48 seconds to spawn the instance on the hypervisor. [ 699.178522] env[69328]: DEBUG nova.compute.manager [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 699.180269] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-557b40ab-e180-4e2e-bf8c-999c66331611 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.183803] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Created folder: Instances in parent group-v653743. [ 699.184057] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 699.184259] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 699.184821] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2b6c3d01-7eab-4c8d-97d7-a0c9af580838 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.207022] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 699.207022] env[69328]: value = "task-3272970" [ 699.207022] env[69328]: _type = "Task" [ 699.207022] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.216946] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272970, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.351321] env[69328]: DEBUG oslo_vmware.api [None req-660a892c-6ba5-4b1f-a71a-8a5d78d335cf tempest-VolumesAssistedSnapshotsTest-1226842710 tempest-VolumesAssistedSnapshotsTest-1226842710-project-admin] Task: {'id': task-3272967, 'name': ReconfigVM_Task, 'duration_secs': 0.142387} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.351634] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-660a892c-6ba5-4b1f-a71a-8a5d78d335cf tempest-VolumesAssistedSnapshotsTest-1226842710 tempest-VolumesAssistedSnapshotsTest-1226842710-project-admin] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653739', 'volume_id': 'b03f8959-d639-4179-af5f-70919bd7f154', 'name': 'volume-b03f8959-d639-4179-af5f-70919bd7f154', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a798c3f2-ccde-488e-8a14-21f4a04f8e12', 'attached_at': '', 'detached_at': '', 'volume_id': 'b03f8959-d639-4179-af5f-70919bd7f154', 'serial': 'b03f8959-d639-4179-af5f-70919bd7f154'} {{(pid=69328) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 699.415773] env[69328]: DEBUG nova.compute.manager [req-e340d5cf-4e0a-4df7-8502-3df345e595c9 req-0be44859-bf9d-467d-9cab-ca50c841f73c service nova] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Received event network-changed-67a5c2b8-cfa7-474e-91f4-f5b16fab46ca {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 699.416010] env[69328]: DEBUG nova.compute.manager [req-e340d5cf-4e0a-4df7-8502-3df345e595c9 req-0be44859-bf9d-467d-9cab-ca50c841f73c service nova] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Refreshing instance network info cache due to event network-changed-67a5c2b8-cfa7-474e-91f4-f5b16fab46ca. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 699.416178] env[69328]: DEBUG oslo_concurrency.lockutils [req-e340d5cf-4e0a-4df7-8502-3df345e595c9 req-0be44859-bf9d-467d-9cab-ca50c841f73c service nova] Acquiring lock "refresh_cache-b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.416337] env[69328]: DEBUG oslo_concurrency.lockutils [req-e340d5cf-4e0a-4df7-8502-3df345e595c9 req-0be44859-bf9d-467d-9cab-ca50c841f73c service nova] Acquired lock "refresh_cache-b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 699.416467] env[69328]: DEBUG nova.network.neutron [req-e340d5cf-4e0a-4df7-8502-3df345e595c9 req-0be44859-bf9d-467d-9cab-ca50c841f73c service nova] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Refreshing network info cache for port 67a5c2b8-cfa7-474e-91f4-f5b16fab46ca {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 699.495306] env[69328]: DEBUG nova.network.neutron [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Successfully created port: 09f50ba2-a927-40b1-a70f-37f75fbfd5ed {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 699.598706] env[69328]: DEBUG nova.compute.manager [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Start building block device mappings for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 699.634219] env[69328]: INFO nova.compute.manager [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] instance snapshotting [ 699.637667] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a548ed1-dd9c-4e0e-b6ff-4db25dd512cd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.659625] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-134d8190-9995-4ec5-b5c2-e9e28466d765 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.714921] env[69328]: INFO nova.compute.manager [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Took 50.96 seconds to build instance. [ 699.721687] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272970, 'name': CreateVM_Task, 'duration_secs': 0.342091} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.721687] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 699.722342] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.722503] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 699.722814] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 699.723073] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa0158c7-af06-4dda-a89a-4180aee2051f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.731041] env[69328]: DEBUG oslo_vmware.api [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 699.731041] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]526c473f-1b10-361b-388a-e13ed49d0af2" [ 699.731041] env[69328]: _type = "Task" [ 
699.731041] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.739639] env[69328]: DEBUG oslo_vmware.api [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]526c473f-1b10-361b-388a-e13ed49d0af2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.919021] env[69328]: DEBUG nova.objects.instance [None req-660a892c-6ba5-4b1f-a71a-8a5d78d335cf tempest-VolumesAssistedSnapshotsTest-1226842710 tempest-VolumesAssistedSnapshotsTest-1226842710-project-admin] Lazy-loading 'flavor' on Instance uuid a798c3f2-ccde-488e-8a14-21f4a04f8e12 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 700.060378] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a811bc79-6d92-4ab4-9f38-814b41f75138 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.068906] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef8005e7-02be-4851-b4e6-47f09f0a5480 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.113440] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18273745-4e50-4651-b315-06b729020be0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.124053] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d76f5f4f-6597-4f27-97d1-e05605cc859e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.139167] env[69328]: DEBUG nova.compute.provider_tree [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 700.194293] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Creating Snapshot of the VM instance {{(pid=69328) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 700.194293] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-5143fe44-6ce7-40c3-8df7-e4834abc1b31 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.194293] env[69328]: DEBUG oslo_vmware.api [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Waiting for the task: (returnval){ [ 700.194293] env[69328]: value = "task-3272971" [ 700.194293] env[69328]: _type = "Task" [ 700.194293] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.194293] env[69328]: DEBUG oslo_vmware.api [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272971, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.217025] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5ae09fe2-d261-4de3-b998-b729b670f8cb tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Lock "b7409a67-c140-436f-9c4e-27dae259f648" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.460s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 700.225579] env[69328]: DEBUG nova.network.neutron [req-e340d5cf-4e0a-4df7-8502-3df345e595c9 req-0be44859-bf9d-467d-9cab-ca50c841f73c service nova] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Updated VIF entry in instance network info cache for port 67a5c2b8-cfa7-474e-91f4-f5b16fab46ca. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 700.225920] env[69328]: DEBUG nova.network.neutron [req-e340d5cf-4e0a-4df7-8502-3df345e595c9 req-0be44859-bf9d-467d-9cab-ca50c841f73c service nova] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Updating instance_info_cache with network_info: [{"id": "67a5c2b8-cfa7-474e-91f4-f5b16fab46ca", "address": "fa:16:3e:1e:59:e3", "network": {"id": "e2ab6957-2c9d-4b95-91bd-5a62d9a284ba", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-967470666-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75d5853e3c724d02bacfa75173e38ab3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67a5c2b8-cf", "ovs_interfaceid": "67a5c2b8-cfa7-474e-91f4-f5b16fab46ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.247122] env[69328]: DEBUG oslo_vmware.api [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]526c473f-1b10-361b-388a-e13ed49d0af2, 'name': SearchDatastore_Task, 'duration_secs': 0.010114} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.247122] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 700.247122] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 700.247232] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 700.247343] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 700.247506] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 700.247793] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9bdde506-9394-47fa-b1e0-3af9f47c3b84 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.256990] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 700.257235] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Folder [datastore1] devstack-image-cache_base created. 
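The Acquiring/Acquired lock records for "[datastore1] devstack-image-cache_base/…" above are oslo.concurrency locks serializing the check-then-populate of the shared image cache, so concurrent builds of the same image do not race between the SearchDatastore_Task check and the copy that follows. A minimal sketch of that locking pattern; check_cached() and fetch_image() are hypothetical placeholders and the lock key is simplified to the image id (the log uses the full datastore path):

```python
# Sketch of the per-image cache locking pattern visible in these records.
# check_cached() and fetch_image() are hypothetical placeholders.
from oslo_concurrency import lockutils

def ensure_image_cached(datastore, image_id, check_cached, fetch_image):
    cache_key = 'image-cache-%s' % image_id
    # external=True adds a file-based lock so the critical section is
    # serialized across worker processes, not just threads; real deployments
    # set [oslo_concurrency]/lock_path instead of passing it here.
    with lockutils.lock(cache_key, lock_file_prefix='nova-',
                        external=True, lock_path='/tmp'):
        if not check_cached(datastore, image_id):
            fetch_image(datastore, image_id)
```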
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 700.257972] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91ed6265-2385-45cc-8f13-ad85f1f5a30b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.264393] env[69328]: DEBUG oslo_vmware.api [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 700.264393] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]525218d6-e6fc-0106-31ac-9ae396f1cf59" [ 700.264393] env[69328]: _type = "Task" [ 700.264393] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.274293] env[69328]: DEBUG oslo_vmware.api [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]525218d6-e6fc-0106-31ac-9ae396f1cf59, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.618905] env[69328]: DEBUG nova.compute.manager [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 700.642432] env[69328]: DEBUG nova.virt.hardware [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 700.642810] env[69328]: DEBUG nova.virt.hardware [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 700.642917] env[69328]: DEBUG nova.virt.hardware [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 700.643221] env[69328]: DEBUG nova.virt.hardware [None 
req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 700.643221] env[69328]: DEBUG nova.virt.hardware [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 700.643370] env[69328]: DEBUG nova.virt.hardware [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 700.643827] env[69328]: DEBUG nova.virt.hardware [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 700.643827] env[69328]: DEBUG nova.virt.hardware [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 700.643949] env[69328]: DEBUG nova.virt.hardware [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 700.644015] env[69328]: DEBUG nova.virt.hardware [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 700.644174] env[69328]: DEBUG nova.virt.hardware [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 700.645191] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e0bed38-c42c-4d14-b18c-9b7094ef6053 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.654026] env[69328]: DEBUG nova.scheduler.client.report [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 
'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 700.658324] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fb10653-07e8-49bf-8320-912428665c9e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.686431] env[69328]: DEBUG oslo_vmware.api [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272971, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.719233] env[69328]: DEBUG nova.compute.manager [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 700.728754] env[69328]: DEBUG oslo_concurrency.lockutils [req-e340d5cf-4e0a-4df7-8502-3df345e595c9 req-0be44859-bf9d-467d-9cab-ca50c841f73c service nova] Releasing lock "refresh_cache-b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 700.777924] env[69328]: DEBUG oslo_vmware.api [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]525218d6-e6fc-0106-31ac-9ae396f1cf59, 'name': SearchDatastore_Task, 'duration_secs': 0.009798} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.779033] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b00bc92c-f760-48e1-bd32-9843050fcd66 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.783626] env[69328]: DEBUG oslo_vmware.api [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 700.783626] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52aaf1e2-3a28-fe54-623f-4391cabfc863" [ 700.783626] env[69328]: _type = "Task" [ 700.783626] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.791210] env[69328]: DEBUG oslo_vmware.api [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52aaf1e2-3a28-fe54-623f-4391cabfc863, 'name': SearchDatastore_Task} progress is 0%. 
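The inventory payload in the records above is what the resource tracker reports to Placement for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e. Per resource class, the schedulable capacity works out to roughly (total - reserved) * allocation_ratio, with max_unit capping any single allocation; the capacity formula here is my paraphrase of Placement's behaviour, not something stated in the log, while the numbers are copied from it:

```python
# Inventory values copied from the log records above; the capacity formula is
# an assumption about how Placement combines reserved and allocation_ratio.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 116},
}

for rc, inv in inventory.items():
    capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
    print('%-9s capacity=%-6d per-allocation cap=%d' % (rc, capacity, inv['max_unit']))
# VCPU      capacity=192    per-allocation cap=16
# MEMORY_MB capacity=196078 per-allocation cap=65530
# DISK_GB   capacity=400    per-allocation cap=116
```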
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.930504] env[69328]: DEBUG oslo_concurrency.lockutils [None req-660a892c-6ba5-4b1f-a71a-8a5d78d335cf tempest-VolumesAssistedSnapshotsTest-1226842710 tempest-VolumesAssistedSnapshotsTest-1226842710-project-admin] Lock "a798c3f2-ccde-488e-8a14-21f4a04f8e12" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.262s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 700.956727] env[69328]: DEBUG nova.network.neutron [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Successfully updated port: 09f50ba2-a927-40b1-a70f-37f75fbfd5ed {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 701.166687] env[69328]: DEBUG oslo_concurrency.lockutils [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.582s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 701.167619] env[69328]: DEBUG nova.compute.manager [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 701.171630] env[69328]: DEBUG oslo_concurrency.lockutils [None req-90aaef94-d57e-40dc-8d46-6941246a844e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 34.139s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 701.189848] env[69328]: DEBUG oslo_vmware.api [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272971, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.248956] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 701.297037] env[69328]: DEBUG oslo_vmware.api [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52aaf1e2-3a28-fe54-623f-4391cabfc863, 'name': SearchDatastore_Task, 'duration_secs': 0.010163} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.297037] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 701.297180] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25/b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 701.297432] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7b06ab83-71a7-4fa1-93a6-41ac7bf76ff0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.304273] env[69328]: DEBUG oslo_vmware.api [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 701.304273] env[69328]: value = "task-3272973" [ 701.304273] env[69328]: _type = "Task" [ 701.304273] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.312587] env[69328]: DEBUG oslo_vmware.api [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3272973, 'name': CopyVirtualDisk_Task} progress is 0%. 
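The CopyVirtualDisk_Task above copies the cached base image VMDK into the instance directory; a few records further on the same instance's root disk is extended to the flavor size (1048576 KB, i.e. 1 GiB). A minimal sketch of that copy-then-extend sequence through the oslo.vmware session; session and dc_ref are assumed to exist (see the earlier session sketch) and the paths are placeholders in the "[datastore1] dir/file.vmdk" form the log uses:

```python
# Sketch of the cache-to-instance copy and the root-disk extend recorded in
# this part of the log; paths and sizes are supplied by the caller.
def copy_then_extend(session, dc_ref, src_path, dst_path, new_capacity_kb):
    vdm = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', vdm,
                              sourceName=src_path, sourceDatacenter=dc_ref,
                              destName=dst_path, destDatacenter=dc_ref)
    session.wait_for_task(task)
    # Grow the copied root disk to the flavor's root size (1048576 KB here).
    task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task', vdm,
                              name=dst_path, datacenter=dc_ref,
                              newCapacityKb=new_capacity_kb, eagerZero=False)
    session.wait_for_task(task)
```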
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.457976] env[69328]: DEBUG oslo_concurrency.lockutils [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Acquiring lock "refresh_cache-8e3a73c1-b622-47f4-99af-71b6dba7c09b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 701.458195] env[69328]: DEBUG oslo_concurrency.lockutils [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Acquired lock "refresh_cache-8e3a73c1-b622-47f4-99af-71b6dba7c09b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 701.458361] env[69328]: DEBUG nova.network.neutron [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 701.491066] env[69328]: DEBUG nova.compute.manager [req-70ccdd23-b11e-4672-b2d8-80aef5034c2a req-c9fec708-84f2-49d9-a881-14204026f994 service nova] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Received event network-vif-plugged-09f50ba2-a927-40b1-a70f-37f75fbfd5ed {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 701.491326] env[69328]: DEBUG oslo_concurrency.lockutils [req-70ccdd23-b11e-4672-b2d8-80aef5034c2a req-c9fec708-84f2-49d9-a881-14204026f994 service nova] Acquiring lock "8e3a73c1-b622-47f4-99af-71b6dba7c09b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 701.491544] env[69328]: DEBUG oslo_concurrency.lockutils [req-70ccdd23-b11e-4672-b2d8-80aef5034c2a req-c9fec708-84f2-49d9-a881-14204026f994 service nova] Lock "8e3a73c1-b622-47f4-99af-71b6dba7c09b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 701.491721] env[69328]: DEBUG oslo_concurrency.lockutils [req-70ccdd23-b11e-4672-b2d8-80aef5034c2a req-c9fec708-84f2-49d9-a881-14204026f994 service nova] Lock "8e3a73c1-b622-47f4-99af-71b6dba7c09b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 701.491889] env[69328]: DEBUG nova.compute.manager [req-70ccdd23-b11e-4672-b2d8-80aef5034c2a req-c9fec708-84f2-49d9-a881-14204026f994 service nova] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] No waiting events found dispatching network-vif-plugged-09f50ba2-a927-40b1-a70f-37f75fbfd5ed {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 701.492068] env[69328]: WARNING nova.compute.manager [req-70ccdd23-b11e-4672-b2d8-80aef5034c2a req-c9fec708-84f2-49d9-a881-14204026f994 service nova] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Received unexpected event network-vif-plugged-09f50ba2-a927-40b1-a70f-37f75fbfd5ed for instance with vm_state building and 
task_state spawning. [ 701.492232] env[69328]: DEBUG nova.compute.manager [req-70ccdd23-b11e-4672-b2d8-80aef5034c2a req-c9fec708-84f2-49d9-a881-14204026f994 service nova] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Received event network-changed-09f50ba2-a927-40b1-a70f-37f75fbfd5ed {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 701.492382] env[69328]: DEBUG nova.compute.manager [req-70ccdd23-b11e-4672-b2d8-80aef5034c2a req-c9fec708-84f2-49d9-a881-14204026f994 service nova] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Refreshing instance network info cache due to event network-changed-09f50ba2-a927-40b1-a70f-37f75fbfd5ed. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 701.492548] env[69328]: DEBUG oslo_concurrency.lockutils [req-70ccdd23-b11e-4672-b2d8-80aef5034c2a req-c9fec708-84f2-49d9-a881-14204026f994 service nova] Acquiring lock "refresh_cache-8e3a73c1-b622-47f4-99af-71b6dba7c09b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 701.675997] env[69328]: DEBUG nova.compute.utils [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 701.677482] env[69328]: DEBUG nova.compute.manager [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 701.678250] env[69328]: DEBUG nova.network.neutron [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 701.694227] env[69328]: DEBUG oslo_vmware.api [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272971, 'name': CreateSnapshot_Task, 'duration_secs': 1.058543} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.694227] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Created Snapshot of the VM instance {{(pid=69328) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 701.694380] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-036e13fa-aa13-4f84-a854-8c96e4d2db04 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.752050] env[69328]: DEBUG nova.policy [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fa91f3dc174d4c33afc82e56dd2bf758', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8502178b3d334c338b63dfde3eae8f08', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 701.818084] env[69328]: DEBUG oslo_vmware.api [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3272973, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.021403] env[69328]: DEBUG nova.network.neutron [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 702.182537] env[69328]: DEBUG nova.compute.manager [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Start building block device mappings for instance. 
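The CreateSnapshot_Task that completed in the records above, followed by the "Creating linked-clone VM from snapshot" CloneVM_Task a little further on, is how the driver captures a running instance for image upload without copying the whole disk. A rough sketch of that sequence through the oslo.vmware session; session, vm_ref and folder_ref are assumed to exist, and the spec handling is simplified relative to what Nova actually builds:

```python
# Sketch of the snapshot -> linked-clone sequence; the clone spec is
# deliberately minimal and the names are placeholders.
def snapshot_and_linked_clone(session, vm_ref, folder_ref, clone_name):
    # 1. Snapshot the VM (no memory dump, quiesce the guest filesystem).
    task = session.invoke_api(session.vim, 'CreateSnapshot_Task', vm_ref,
                              name='upload-snap', description='image upload',
                              memory=False, quiesce=True)
    snapshot_ref = session.wait_for_task(task).result

    # 2. Clone from that snapshot; createNewChildDiskBacking yields a linked
    #    clone (child delta disks) rather than a full copy.
    cf = session.vim.client.factory
    rel_spec = cf.create('ns0:VirtualMachineRelocateSpec')
    rel_spec.diskMoveType = 'createNewChildDiskBacking'
    clone_spec = cf.create('ns0:VirtualMachineCloneSpec')
    clone_spec.location = rel_spec
    clone_spec.snapshot = snapshot_ref
    clone_spec.powerOn = False
    clone_spec.template = True
    task = session.invoke_api(session.vim, 'CloneVM_Task', vm_ref,
                              folder=folder_ref, name=clone_name,
                              spec=clone_spec)
    return session.wait_for_task(task).result   # moref of the clone
```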
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 702.221268] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Creating linked-clone VM from snapshot {{(pid=69328) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 702.225232] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-4880de4c-2560-44f1-92a0-2610af11231c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.236959] env[69328]: DEBUG oslo_vmware.api [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Waiting for the task: (returnval){ [ 702.236959] env[69328]: value = "task-3272974" [ 702.236959] env[69328]: _type = "Task" [ 702.236959] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.245982] env[69328]: DEBUG oslo_vmware.api [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272974, 'name': CloneVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.263817] env[69328]: DEBUG nova.network.neutron [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Successfully created port: f159b639-986d-4584-94e0-589e890cb653 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 702.276896] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfd11b4b-b4e0-4d39-89ad-ee814b38b2ae {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.284301] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7709823e-d321-4e2b-bd3f-2760f4b2cefa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.320450] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8991f68c-11df-4e5b-a6b6-d80c8aa53b43 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.325053] env[69328]: DEBUG nova.network.neutron [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Updating instance_info_cache with network_info: [{"id": "09f50ba2-a927-40b1-a70f-37f75fbfd5ed", "address": "fa:16:3e:40:fc:42", "network": {"id": "edfe6995-ac16-4086-9cb4-efcda93045c6", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-703565163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": 
[]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e8bc0d144f44546bd21fb04277c998c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f", "external-id": "nsx-vlan-transportzone-584", "segmentation_id": 584, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09f50ba2-a9", "ovs_interfaceid": "09f50ba2-a927-40b1-a70f-37f75fbfd5ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 702.331478] env[69328]: DEBUG oslo_vmware.api [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3272973, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.732497} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.332073] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25/b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 702.332296] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 702.333517] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-076fa798-9fb1-4775-884f-702226c620c8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.337286] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-031a785f-54dd-4b2f-b77e-a92240b1e6d4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.351950] env[69328]: DEBUG nova.compute.provider_tree [None req-90aaef94-d57e-40dc-8d46-6941246a844e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 702.358054] env[69328]: DEBUG oslo_vmware.api [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 
tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 702.358054] env[69328]: value = "task-3272975" [ 702.358054] env[69328]: _type = "Task" [ 702.358054] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.363903] env[69328]: DEBUG oslo_vmware.api [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3272975, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.486677] env[69328]: DEBUG oslo_concurrency.lockutils [None req-af64df0c-6b85-49fd-a4d1-e6f242033274 tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Acquiring lock "b7409a67-c140-436f-9c4e-27dae259f648" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 702.487046] env[69328]: DEBUG oslo_concurrency.lockutils [None req-af64df0c-6b85-49fd-a4d1-e6f242033274 tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Lock "b7409a67-c140-436f-9c4e-27dae259f648" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 702.487306] env[69328]: DEBUG oslo_concurrency.lockutils [None req-af64df0c-6b85-49fd-a4d1-e6f242033274 tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Acquiring lock "b7409a67-c140-436f-9c4e-27dae259f648-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 702.487459] env[69328]: DEBUG oslo_concurrency.lockutils [None req-af64df0c-6b85-49fd-a4d1-e6f242033274 tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Lock "b7409a67-c140-436f-9c4e-27dae259f648-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 702.487624] env[69328]: DEBUG oslo_concurrency.lockutils [None req-af64df0c-6b85-49fd-a4d1-e6f242033274 tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Lock "b7409a67-c140-436f-9c4e-27dae259f648-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 702.489903] env[69328]: INFO nova.compute.manager [None req-af64df0c-6b85-49fd-a4d1-e6f242033274 tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Terminating instance [ 702.748744] env[69328]: DEBUG oslo_vmware.api [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272974, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.833445] env[69328]: DEBUG oslo_concurrency.lockutils [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Releasing lock "refresh_cache-8e3a73c1-b622-47f4-99af-71b6dba7c09b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 702.833791] env[69328]: DEBUG nova.compute.manager [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Instance network_info: |[{"id": "09f50ba2-a927-40b1-a70f-37f75fbfd5ed", "address": "fa:16:3e:40:fc:42", "network": {"id": "edfe6995-ac16-4086-9cb4-efcda93045c6", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-703565163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e8bc0d144f44546bd21fb04277c998c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f", "external-id": "nsx-vlan-transportzone-584", "segmentation_id": 584, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09f50ba2-a9", "ovs_interfaceid": "09f50ba2-a927-40b1-a70f-37f75fbfd5ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 702.834117] env[69328]: DEBUG oslo_concurrency.lockutils [req-70ccdd23-b11e-4672-b2d8-80aef5034c2a req-c9fec708-84f2-49d9-a881-14204026f994 service nova] Acquired lock "refresh_cache-8e3a73c1-b622-47f4-99af-71b6dba7c09b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 702.834299] env[69328]: DEBUG nova.network.neutron [req-70ccdd23-b11e-4672-b2d8-80aef5034c2a req-c9fec708-84f2-49d9-a881-14204026f994 service nova] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Refreshing network info cache for port 09f50ba2-a927-40b1-a70f-37f75fbfd5ed {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 702.835473] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:40:fc:42', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '09f50ba2-a927-40b1-a70f-37f75fbfd5ed', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 702.842817] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 
tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Creating folder: Project (4e8bc0d144f44546bd21fb04277c998c). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 702.843762] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c2d34ec9-f869-48ef-a3b2-0fc4dbcbe0cc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.855232] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Created folder: Project (4e8bc0d144f44546bd21fb04277c998c) in parent group-v653649. [ 702.855435] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Creating folder: Instances. Parent ref: group-v653748. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 702.859210] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-feceaaf0-2f55-45eb-aded-e3e45ba0c808 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.870687] env[69328]: DEBUG oslo_vmware.api [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3272975, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.204299} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.870934] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 702.871681] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5376e3df-81e2-4da4-836a-34a1cd8b5dae {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.874931] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Created folder: Instances in parent group-v653748. [ 702.875171] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 702.875641] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 702.875841] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-88a42f71-7039-4b7f-b95a-e2a08f2c56a6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.899049] env[69328]: DEBUG nova.scheduler.client.report [None req-90aaef94-d57e-40dc-8d46-6941246a844e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Updated inventory for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with generation 56 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 702.899312] env[69328]: DEBUG nova.compute.provider_tree [None req-90aaef94-d57e-40dc-8d46-6941246a844e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Updating resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e generation from 56 to 57 during operation: update_inventory {{(pid=69328) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 702.899488] env[69328]: DEBUG nova.compute.provider_tree [None req-90aaef94-d57e-40dc-8d46-6941246a844e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 702.911329] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Reconfiguring VM instance instance-0000001d to attach disk [datastore1] b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25/b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 702.912325] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2f01c026-c861-4bf1-a5ff-329245c55e99 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.927943] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 702.927943] env[69328]: value = "task-3272978" [ 702.927943] env[69328]: _type = "Task" [ 702.927943] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.935633] env[69328]: DEBUG oslo_vmware.api [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 702.935633] env[69328]: value = "task-3272979" [ 702.935633] env[69328]: _type = "Task" [ 702.935633] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.944172] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272978, 'name': CreateVM_Task} progress is 10%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.951054] env[69328]: DEBUG oslo_vmware.api [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3272979, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.996672] env[69328]: DEBUG nova.compute.manager [None req-af64df0c-6b85-49fd-a4d1-e6f242033274 tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 702.996672] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-af64df0c-6b85-49fd-a4d1-e6f242033274 tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 702.996672] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d9f016d-4b39-4217-b4c6-13637a465cfa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.003537] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-af64df0c-6b85-49fd-a4d1-e6f242033274 tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 703.003737] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f3f99bd1-5eeb-4aba-a39e-8a1f18c36899 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.010239] env[69328]: DEBUG oslo_vmware.api [None req-af64df0c-6b85-49fd-a4d1-e6f242033274 tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Waiting for the task: (returnval){ [ 703.010239] env[69328]: value = "task-3272980" [ 703.010239] env[69328]: _type = "Task" [ 703.010239] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.018885] env[69328]: DEBUG oslo_vmware.api [None req-af64df0c-6b85-49fd-a4d1-e6f242033274 tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Task: {'id': task-3272980, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.194076] env[69328]: DEBUG nova.compute.manager [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 703.215850] env[69328]: DEBUG nova.virt.hardware [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 703.216137] env[69328]: DEBUG nova.virt.hardware [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 703.216308] env[69328]: DEBUG nova.virt.hardware [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 703.216492] env[69328]: DEBUG nova.virt.hardware [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 703.216637] env[69328]: DEBUG nova.virt.hardware [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 703.216792] env[69328]: DEBUG nova.virt.hardware [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 703.217111] env[69328]: DEBUG nova.virt.hardware [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 703.217330] env[69328]: DEBUG nova.virt.hardware [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 703.217509] env[69328]: DEBUG nova.virt.hardware [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 703.217692] env[69328]: DEBUG nova.virt.hardware [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 703.217871] env[69328]: DEBUG nova.virt.hardware [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 703.218831] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a53df128-7955-44e9-9c9e-4d6814bb800e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.227244] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ddf83bb-26a9-46b2-8485-0627725e5cb4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.249215] env[69328]: DEBUG oslo_vmware.api [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272974, 'name': CloneVM_Task} progress is 94%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.445807] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272978, 'name': CreateVM_Task} progress is 25%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.451279] env[69328]: DEBUG oslo_vmware.api [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3272979, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.523395] env[69328]: DEBUG oslo_vmware.api [None req-af64df0c-6b85-49fd-a4d1-e6f242033274 tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Task: {'id': task-3272980, 'name': PowerOffVM_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.624258] env[69328]: DEBUG nova.network.neutron [req-70ccdd23-b11e-4672-b2d8-80aef5034c2a req-c9fec708-84f2-49d9-a881-14204026f994 service nova] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Updated VIF entry in instance network info cache for port 09f50ba2-a927-40b1-a70f-37f75fbfd5ed. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 703.625083] env[69328]: DEBUG nova.network.neutron [req-70ccdd23-b11e-4672-b2d8-80aef5034c2a req-c9fec708-84f2-49d9-a881-14204026f994 service nova] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Updating instance_info_cache with network_info: [{"id": "09f50ba2-a927-40b1-a70f-37f75fbfd5ed", "address": "fa:16:3e:40:fc:42", "network": {"id": "edfe6995-ac16-4086-9cb4-efcda93045c6", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-703565163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e8bc0d144f44546bd21fb04277c998c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f", "external-id": "nsx-vlan-transportzone-584", "segmentation_id": 584, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09f50ba2-a9", "ovs_interfaceid": "09f50ba2-a927-40b1-a70f-37f75fbfd5ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 703.756409] env[69328]: DEBUG oslo_vmware.api [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272974, 'name': CloneVM_Task} progress is 95%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.893872] env[69328]: DEBUG nova.compute.manager [req-4cf7f36a-488a-49f1-af79-012244898916 req-ff97ecd7-d36d-4ffa-a1a5-b29cb4c9568d service nova] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Received event network-vif-plugged-f159b639-986d-4584-94e0-589e890cb653 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 703.893872] env[69328]: DEBUG oslo_concurrency.lockutils [req-4cf7f36a-488a-49f1-af79-012244898916 req-ff97ecd7-d36d-4ffa-a1a5-b29cb4c9568d service nova] Acquiring lock "e1eec0ce-8df7-402a-b628-5dfdc11949e7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 703.894012] env[69328]: DEBUG oslo_concurrency.lockutils [req-4cf7f36a-488a-49f1-af79-012244898916 req-ff97ecd7-d36d-4ffa-a1a5-b29cb4c9568d service nova] Lock "e1eec0ce-8df7-402a-b628-5dfdc11949e7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 703.894149] env[69328]: DEBUG oslo_concurrency.lockutils [req-4cf7f36a-488a-49f1-af79-012244898916 req-ff97ecd7-d36d-4ffa-a1a5-b29cb4c9568d service nova] Lock "e1eec0ce-8df7-402a-b628-5dfdc11949e7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 703.894371] env[69328]: DEBUG nova.compute.manager [req-4cf7f36a-488a-49f1-af79-012244898916 req-ff97ecd7-d36d-4ffa-a1a5-b29cb4c9568d service nova] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] No waiting events found dispatching network-vif-plugged-f159b639-986d-4584-94e0-589e890cb653 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 703.894543] env[69328]: WARNING nova.compute.manager [req-4cf7f36a-488a-49f1-af79-012244898916 req-ff97ecd7-d36d-4ffa-a1a5-b29cb4c9568d service nova] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Received unexpected event network-vif-plugged-f159b639-986d-4584-94e0-589e890cb653 for instance with vm_state building and task_state spawning. 
[ 703.916585] env[69328]: DEBUG nova.network.neutron [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Successfully updated port: f159b639-986d-4584-94e0-589e890cb653 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 703.922271] env[69328]: DEBUG oslo_concurrency.lockutils [None req-90aaef94-d57e-40dc-8d46-6941246a844e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.751s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 703.926516] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.611s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 703.926802] env[69328]: DEBUG nova.objects.instance [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Lazy-loading 'resources' on Instance uuid 9753734d-90f0-4661-8029-ec312e88eb60 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 703.943866] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272978, 'name': CreateVM_Task, 'duration_secs': 0.825117} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.944585] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 703.945455] env[69328]: DEBUG oslo_concurrency.lockutils [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 703.945501] env[69328]: DEBUG oslo_concurrency.lockutils [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 703.945844] env[69328]: DEBUG oslo_concurrency.lockutils [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 703.949890] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-281310cf-bc39-4ce4-b6f5-407de7c2428a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.950834] env[69328]: DEBUG oslo_vmware.api [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3272979, 'name': ReconfigVM_Task, 'duration_secs': 0.85864} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.951106] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Reconfigured VM instance instance-0000001d to attach disk [datastore1] b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25/b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 703.952252] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-533d7a16-30a2-4099-8c1f-f064cbcb75eb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.957709] env[69328]: DEBUG oslo_vmware.api [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for the task: (returnval){ [ 703.957709] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52af5ba9-828a-6dd2-e60d-e98811006dbe" [ 703.957709] env[69328]: _type = "Task" [ 703.957709] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.964349] env[69328]: DEBUG oslo_vmware.api [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 703.964349] env[69328]: value = "task-3272981" [ 703.964349] env[69328]: _type = "Task" [ 703.964349] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.971941] env[69328]: DEBUG oslo_vmware.api [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52af5ba9-828a-6dd2-e60d-e98811006dbe, 'name': SearchDatastore_Task, 'duration_secs': 0.008922} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.972599] env[69328]: DEBUG oslo_concurrency.lockutils [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 703.972833] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 703.973083] env[69328]: DEBUG oslo_concurrency.lockutils [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 703.973232] env[69328]: DEBUG oslo_concurrency.lockutils [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 703.973409] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 703.973904] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-21e52fa8-b0be-4e4b-9593-080ec799455f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.978924] env[69328]: DEBUG oslo_vmware.api [None req-4a40713f-84b6-4637-9deb-f349d2897d55 
tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3272981, 'name': Rename_Task} progress is 10%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.985794] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 703.985972] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 703.986723] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3cded7bc-33dc-49ce-b235-41e2e22b8581 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.992562] env[69328]: DEBUG oslo_vmware.api [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for the task: (returnval){ [ 703.992562] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5223624c-896d-4012-9689-4c0cfded3b7f" [ 703.992562] env[69328]: _type = "Task" [ 703.992562] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.002739] env[69328]: DEBUG oslo_vmware.api [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5223624c-896d-4012-9689-4c0cfded3b7f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.019640] env[69328]: DEBUG oslo_vmware.api [None req-af64df0c-6b85-49fd-a4d1-e6f242033274 tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Task: {'id': task-3272980, 'name': PowerOffVM_Task, 'duration_secs': 0.577397} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.019898] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-af64df0c-6b85-49fd-a4d1-e6f242033274 tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 704.020082] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-af64df0c-6b85-49fd-a4d1-e6f242033274 tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 704.020325] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-421e6c52-df86-406c-85cc-fc9c70685555 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.081525] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-af64df0c-6b85-49fd-a4d1-e6f242033274 tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 704.081748] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-af64df0c-6b85-49fd-a4d1-e6f242033274 tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 704.081929] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-af64df0c-6b85-49fd-a4d1-e6f242033274 tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Deleting the datastore file [datastore1] b7409a67-c140-436f-9c4e-27dae259f648 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 704.082206] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c2a992d1-b401-4fb4-a0da-983a90278e57 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.088391] env[69328]: DEBUG oslo_vmware.api [None req-af64df0c-6b85-49fd-a4d1-e6f242033274 tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Waiting for the task: (returnval){ [ 704.088391] env[69328]: value = "task-3272983" [ 704.088391] env[69328]: _type = "Task" [ 704.088391] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.096134] env[69328]: DEBUG oslo_vmware.api [None req-af64df0c-6b85-49fd-a4d1-e6f242033274 tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Task: {'id': task-3272983, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.132407] env[69328]: DEBUG oslo_concurrency.lockutils [req-70ccdd23-b11e-4672-b2d8-80aef5034c2a req-c9fec708-84f2-49d9-a881-14204026f994 service nova] Releasing lock "refresh_cache-8e3a73c1-b622-47f4-99af-71b6dba7c09b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 704.252879] env[69328]: DEBUG oslo_vmware.api [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3272974, 'name': CloneVM_Task, 'duration_secs': 1.683394} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.253172] env[69328]: INFO nova.virt.vmwareapi.vmops [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Created linked-clone VM from snapshot [ 704.253917] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fced37a3-1449-4bc0-a7d7-c4358e321f66 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.261437] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Uploading image 80556425-0db6-4f16-b889-eb057a86400c {{(pid=69328) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 704.281273] env[69328]: DEBUG oslo_vmware.rw_handles [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 704.281273] env[69328]: value = "vm-653747" [ 704.281273] env[69328]: _type = "VirtualMachine" [ 704.281273] env[69328]: }. {{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 704.281518] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-d0f0c224-b89e-4ce7-b26a-25a5a534f94a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.288331] env[69328]: DEBUG oslo_vmware.rw_handles [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Lease: (returnval){ [ 704.288331] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a03fb8-f985-701c-bae2-23ab009e19ae" [ 704.288331] env[69328]: _type = "HttpNfcLease" [ 704.288331] env[69328]: } obtained for exporting VM: (result){ [ 704.288331] env[69328]: value = "vm-653747" [ 704.288331] env[69328]: _type = "VirtualMachine" [ 704.288331] env[69328]: }. 
{{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 704.288585] env[69328]: DEBUG oslo_vmware.api [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Waiting for the lease: (returnval){ [ 704.288585] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a03fb8-f985-701c-bae2-23ab009e19ae" [ 704.288585] env[69328]: _type = "HttpNfcLease" [ 704.288585] env[69328]: } to be ready. {{(pid=69328) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 704.294836] env[69328]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 704.294836] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a03fb8-f985-701c-bae2-23ab009e19ae" [ 704.294836] env[69328]: _type = "HttpNfcLease" [ 704.294836] env[69328]: } is initializing. {{(pid=69328) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 704.368635] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d9988052-6a38-48dd-86f7-6ef56deaba6f tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Acquiring lock "a798c3f2-ccde-488e-8a14-21f4a04f8e12" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 704.368920] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d9988052-6a38-48dd-86f7-6ef56deaba6f tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Lock "a798c3f2-ccde-488e-8a14-21f4a04f8e12" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 704.369170] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d9988052-6a38-48dd-86f7-6ef56deaba6f tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Acquiring lock "a798c3f2-ccde-488e-8a14-21f4a04f8e12-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 704.369357] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d9988052-6a38-48dd-86f7-6ef56deaba6f tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Lock "a798c3f2-ccde-488e-8a14-21f4a04f8e12-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 704.369529] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d9988052-6a38-48dd-86f7-6ef56deaba6f tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Lock "a798c3f2-ccde-488e-8a14-21f4a04f8e12-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 704.372196] env[69328]: INFO nova.compute.manager [None req-d9988052-6a38-48dd-86f7-6ef56deaba6f 
tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Terminating instance [ 704.419274] env[69328]: DEBUG oslo_concurrency.lockutils [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Acquiring lock "refresh_cache-e1eec0ce-8df7-402a-b628-5dfdc11949e7" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 704.419394] env[69328]: DEBUG oslo_concurrency.lockutils [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Acquired lock "refresh_cache-e1eec0ce-8df7-402a-b628-5dfdc11949e7" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 704.419599] env[69328]: DEBUG nova.network.neutron [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 704.477446] env[69328]: DEBUG oslo_vmware.api [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3272981, 'name': Rename_Task, 'duration_secs': 0.134141} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.478376] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 704.479052] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-48394e71-31a4-42f1-b98d-c6419f2ffd46 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.485753] env[69328]: DEBUG oslo_vmware.api [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 704.485753] env[69328]: value = "task-3272985" [ 704.485753] env[69328]: _type = "Task" [ 704.485753] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.492434] env[69328]: INFO nova.scheduler.client.report [None req-90aaef94-d57e-40dc-8d46-6941246a844e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Deleted allocation for migration 3db95d5c-678c-43e6-b21d-735678c13948 [ 704.506073] env[69328]: DEBUG oslo_vmware.api [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3272985, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.511789] env[69328]: DEBUG oslo_vmware.api [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5223624c-896d-4012-9689-4c0cfded3b7f, 'name': SearchDatastore_Task, 'duration_secs': 0.008425} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.512587] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7da66a60-17a8-4abb-8baf-3285c8c5af6c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.522132] env[69328]: DEBUG oslo_vmware.api [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for the task: (returnval){ [ 704.522132] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]524b8bc4-f922-bd25-b303-e9ba185c21be" [ 704.522132] env[69328]: _type = "Task" [ 704.522132] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.530108] env[69328]: DEBUG oslo_vmware.api [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]524b8bc4-f922-bd25-b303-e9ba185c21be, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.600384] env[69328]: DEBUG oslo_vmware.api [None req-af64df0c-6b85-49fd-a4d1-e6f242033274 tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Task: {'id': task-3272983, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148768} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.600663] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-af64df0c-6b85-49fd-a4d1-e6f242033274 tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 704.600873] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-af64df0c-6b85-49fd-a4d1-e6f242033274 tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 704.601060] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-af64df0c-6b85-49fd-a4d1-e6f242033274 tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 704.601237] env[69328]: INFO nova.compute.manager [None req-af64df0c-6b85-49fd-a4d1-e6f242033274 tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Took 1.61 seconds to destroy the instance on the hypervisor. [ 704.601478] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-af64df0c-6b85-49fd-a4d1-e6f242033274 tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 704.601666] env[69328]: DEBUG nova.compute.manager [-] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 704.601846] env[69328]: DEBUG nova.network.neutron [-] [instance: b7409a67-c140-436f-9c4e-27dae259f648] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 704.806688] env[69328]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 704.806688] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a03fb8-f985-701c-bae2-23ab009e19ae" [ 704.806688] env[69328]: _type = "HttpNfcLease" [ 704.806688] env[69328]: } is ready. {{(pid=69328) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 704.807071] env[69328]: DEBUG oslo_vmware.rw_handles [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 704.807071] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a03fb8-f985-701c-bae2-23ab009e19ae" [ 704.807071] env[69328]: _type = "HttpNfcLease" [ 704.807071] env[69328]: }. 
{{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 704.807739] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d524328-329f-482d-9a6c-f1261f7cf517 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.817218] env[69328]: DEBUG oslo_vmware.rw_handles [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ba609a-ed0f-a223-b5a4-0c7b7b167131/disk-0.vmdk from lease info. {{(pid=69328) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 704.817411] env[69328]: DEBUG oslo_vmware.rw_handles [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ba609a-ed0f-a223-b5a4-0c7b7b167131/disk-0.vmdk for reading. {{(pid=69328) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 704.880165] env[69328]: DEBUG nova.compute.manager [None req-d9988052-6a38-48dd-86f7-6ef56deaba6f tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 704.880450] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d9988052-6a38-48dd-86f7-6ef56deaba6f tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 704.884025] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-339db7a1-5bec-4671-89c6-e4c5da1b6d26 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.892847] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9988052-6a38-48dd-86f7-6ef56deaba6f tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 704.893125] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-63bda72c-5d22-47de-aee0-aaa823789090 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.902577] env[69328]: DEBUG oslo_vmware.api [None req-d9988052-6a38-48dd-86f7-6ef56deaba6f tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Waiting for the task: (returnval){ [ 704.902577] env[69328]: value = "task-3272986" [ 704.902577] env[69328]: _type = "Task" [ 704.902577] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.911529] env[69328]: DEBUG oslo_vmware.api [None req-d9988052-6a38-48dd-86f7-6ef56deaba6f tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Task: {'id': task-3272986, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.921128] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-6e5e5d04-1d6e-4b52-b88e-e1fdc769e738 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.968349] env[69328]: DEBUG nova.network.neutron [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 705.002581] env[69328]: DEBUG oslo_vmware.api [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3272985, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.006958] env[69328]: DEBUG oslo_concurrency.lockutils [None req-90aaef94-d57e-40dc-8d46-6941246a844e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Lock "d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 41.402s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 705.035641] env[69328]: DEBUG oslo_vmware.api [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]524b8bc4-f922-bd25-b303-e9ba185c21be, 'name': SearchDatastore_Task, 'duration_secs': 0.017373} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.035904] env[69328]: DEBUG oslo_concurrency.lockutils [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 705.036182] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 8e3a73c1-b622-47f4-99af-71b6dba7c09b/8e3a73c1-b622-47f4-99af-71b6dba7c09b.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 705.036457] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-87adfb4d-082b-4608-8cfc-060ec57cb1bb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.043688] env[69328]: DEBUG oslo_vmware.api [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for the task: (returnval){ [ 705.043688] env[69328]: value = "task-3272987" [ 705.043688] env[69328]: _type = "Task" [ 705.043688] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.051788] env[69328]: DEBUG oslo_vmware.api [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3272987, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.133326] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef2534ef-8d62-403f-8305-7e245c0dfbe0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.140878] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-716b6b75-2a97-41b8-82e7-b9215d7adeaa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.178938] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d73503bf-a5c8-47c8-9d17-7d3df7d4ec44 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.182432] env[69328]: DEBUG nova.network.neutron [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Updating instance_info_cache with network_info: [{"id": "f159b639-986d-4584-94e0-589e890cb653", "address": "fa:16:3e:78:ba:5e", "network": {"id": "b7b15f77-0584-4f19-a05e-67df3efe1b9d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-778653716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8502178b3d334c338b63dfde3eae8f08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d275d7c6-2a7b-4ee8-b6f4-fabf1ba1905f", "external-id": "nsx-vlan-transportzone-513", "segmentation_id": 513, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf159b639-98", "ovs_interfaceid": "f159b639-986d-4584-94e0-589e890cb653", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 705.189576] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d74d548c-bb4c-4ef7-a044-fe5eea46488b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.204091] env[69328]: DEBUG nova.compute.provider_tree [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 705.413717] env[69328]: DEBUG 
oslo_vmware.api [None req-d9988052-6a38-48dd-86f7-6ef56deaba6f tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Task: {'id': task-3272986, 'name': PowerOffVM_Task, 'duration_secs': 0.232557} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.414333] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9988052-6a38-48dd-86f7-6ef56deaba6f tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 705.415776] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d9988052-6a38-48dd-86f7-6ef56deaba6f tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 705.415776] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-67c17243-49da-450a-8d2a-75905384a56b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.475694] env[69328]: DEBUG nova.network.neutron [-] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 705.494339] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d9988052-6a38-48dd-86f7-6ef56deaba6f tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 705.495379] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d9988052-6a38-48dd-86f7-6ef56deaba6f tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 705.497627] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9988052-6a38-48dd-86f7-6ef56deaba6f tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Deleting the datastore file [datastore1] a798c3f2-ccde-488e-8a14-21f4a04f8e12 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 705.499384] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dae0e786-b9f2-4816-b7db-e8ba6f688ae9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.506717] env[69328]: DEBUG oslo_vmware.api [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3272985, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.514568] env[69328]: DEBUG oslo_vmware.api [None req-d9988052-6a38-48dd-86f7-6ef56deaba6f tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Waiting for the task: (returnval){ [ 705.514568] env[69328]: value = "task-3272989" [ 705.514568] env[69328]: _type = "Task" [ 705.514568] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.527032] env[69328]: DEBUG oslo_vmware.api [None req-d9988052-6a38-48dd-86f7-6ef56deaba6f tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Task: {'id': task-3272989, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.557992] env[69328]: DEBUG oslo_vmware.api [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3272987, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.685372] env[69328]: DEBUG oslo_concurrency.lockutils [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Releasing lock "refresh_cache-e1eec0ce-8df7-402a-b628-5dfdc11949e7" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 705.685727] env[69328]: DEBUG nova.compute.manager [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Instance network_info: |[{"id": "f159b639-986d-4584-94e0-589e890cb653", "address": "fa:16:3e:78:ba:5e", "network": {"id": "b7b15f77-0584-4f19-a05e-67df3efe1b9d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-778653716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8502178b3d334c338b63dfde3eae8f08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d275d7c6-2a7b-4ee8-b6f4-fabf1ba1905f", "external-id": "nsx-vlan-transportzone-513", "segmentation_id": 513, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf159b639-98", "ovs_interfaceid": "f159b639-986d-4584-94e0-589e890cb653", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 705.686205] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:78:ba:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd275d7c6-2a7b-4ee8-b6f4-fabf1ba1905f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f159b639-986d-4584-94e0-589e890cb653', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 705.699346] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 705.699788] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 705.700042] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ed132a9c-9682-44c3-a139-02fe7918bce5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.725216] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 705.725216] env[69328]: value = "task-3272990" [ 705.725216] env[69328]: _type = "Task" [ 705.725216] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.733830] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272990, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.738309] env[69328]: ERROR nova.scheduler.client.report [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [req-93b00e99-0f33-4bdd-bcf5-213155d1143e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-93b00e99-0f33-4bdd-bcf5-213155d1143e"}]} [ 705.759651] env[69328]: DEBUG nova.scheduler.client.report [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Refreshing inventories for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 705.782135] env[69328]: DEBUG nova.scheduler.client.report [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Updating ProviderTree inventory for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 705.782382] env[69328]: DEBUG nova.compute.provider_tree [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 705.796280] env[69328]: DEBUG nova.scheduler.client.report [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Refreshing aggregate associations for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e, aggregates: None {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 705.823895] env[69328]: DEBUG nova.scheduler.client.report [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Refreshing trait associations for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 705.979726] env[69328]: INFO nova.compute.manager [-] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Took 1.38 seconds to deallocate network for instance. 
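The 409 `placement.concurrent_update` responses above are resource provider generation conflicts: the report client submitted an inventory update tagged with a stale generation, Placement rejected it, and the client refreshed inventories, aggregates, and traits before trying again. A minimal sketch of that read-modify-retry pattern, assuming a plain `requests` session and an illustrative Placement endpoint (the URL, microversion header, and helper shape are assumptions, not Nova's actual report client):

```python
# Illustrative retry-on-generation-conflict sketch for a Placement inventory
# update. Endpoint, header value, and error handling are assumptions for
# illustration only; Nova's scheduler report client does this internally.
import requests

PLACEMENT = "https://placement.example.test"           # hypothetical endpoint
HEADERS = {"OpenStack-API-Version": "placement 1.26"}   # assumed microversion


def set_inventory(rp_uuid, inventories, session=requests, max_attempts=4):
    url = f"{PLACEMENT}/resource_providers/{rp_uuid}/inventories"
    for _ in range(max_attempts):
        # Re-read the provider's current generation before each attempt.
        current = session.get(url, headers=HEADERS).json()
        payload = {
            "resource_provider_generation": current["resource_provider_generation"],
            "inventories": inventories,
        }
        resp = session.put(url, json=payload, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # 409 placement.concurrent_update: another writer bumped the
        # generation; refresh and retry, as the log entries above show.
    raise RuntimeError("gave up after repeated generation conflicts")
```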
[ 706.001404] env[69328]: DEBUG oslo_vmware.api [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3272985, 'name': PowerOnVM_Task, 'duration_secs': 1.226831} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.001660] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 706.001861] env[69328]: INFO nova.compute.manager [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Took 9.32 seconds to spawn the instance on the hypervisor. [ 706.004269] env[69328]: DEBUG nova.compute.manager [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 706.004269] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d67825c3-a371-4b59-a1da-757908f5b329 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.029982] env[69328]: DEBUG oslo_vmware.api [None req-d9988052-6a38-48dd-86f7-6ef56deaba6f tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Task: {'id': task-3272989, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.288075} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.030775] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9988052-6a38-48dd-86f7-6ef56deaba6f tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 706.031058] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d9988052-6a38-48dd-86f7-6ef56deaba6f tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 706.031382] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d9988052-6a38-48dd-86f7-6ef56deaba6f tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 706.031611] env[69328]: INFO nova.compute.manager [None req-d9988052-6a38-48dd-86f7-6ef56deaba6f tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Took 1.15 seconds to destroy the instance on the hypervisor. 
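Each `wait_for_task` / `_poll_task` pair in these entries is oslo.vmware polling a vCenter task object (PowerOnVM_Task, DeleteDatastoreFile_Task, CopyVirtualDisk_Task, ...) until it reaches a terminal state, logging the "progress is NN%" lines along the way. A bare-bones version of that loop, where `get_task_info` is a hypothetical stand-in for the PropertyCollector read the library actually performs inside `VMwareAPISession.wait_for_task()`:

```python
# Minimal sketch of the polling loop reflected by the "wait_for_task" /
# "_poll_task" entries above. get_task_info() is an illustrative callback,
# not an oslo.vmware API.
import time


class TaskFailed(Exception):
    pass


def wait_for_task(task_ref, get_task_info, poll_interval=0.5, timeout=300):
    """Poll a vCenter task until it succeeds, fails, or times out."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        # Expected shape: {'state': 'queued'|'running'|'success'|'error',
        #                  'progress': int, 'result': ..., 'error': ...}
        info = get_task_info(task_ref)
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            raise TaskFailed(info.get("error"))
        # Still queued/running: sleep and poll again, producing the
        # "progress is NN%" lines seen in this log.
        time.sleep(poll_interval)
    raise TimeoutError(f"task {task_ref} did not complete within {timeout}s")
```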
[ 706.031912] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d9988052-6a38-48dd-86f7-6ef56deaba6f tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 706.033186] env[69328]: DEBUG nova.compute.manager [-] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 706.033280] env[69328]: DEBUG nova.network.neutron [-] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 706.061058] env[69328]: DEBUG oslo_vmware.api [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3272987, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.575076} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.061407] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 8e3a73c1-b622-47f4-99af-71b6dba7c09b/8e3a73c1-b622-47f4-99af-71b6dba7c09b.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 706.061674] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 706.064877] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f78495a1-9fa9-4d70-b537-f9f52bc2e5ec {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.072839] env[69328]: DEBUG oslo_vmware.api [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for the task: (returnval){ [ 706.072839] env[69328]: value = "task-3272991" [ 706.072839] env[69328]: _type = "Task" [ 706.072839] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.089895] env[69328]: DEBUG oslo_vmware.api [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3272991, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.112380] env[69328]: DEBUG nova.compute.manager [req-3c8158c0-3937-469e-8754-4992578f43ce req-d53a9b69-69eb-4d13-bc20-855df862a032 service nova] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Received event network-changed-f159b639-986d-4584-94e0-589e890cb653 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 706.112621] env[69328]: DEBUG nova.compute.manager [req-3c8158c0-3937-469e-8754-4992578f43ce req-d53a9b69-69eb-4d13-bc20-855df862a032 service nova] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Refreshing instance network info cache due to event network-changed-f159b639-986d-4584-94e0-589e890cb653. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 706.112880] env[69328]: DEBUG oslo_concurrency.lockutils [req-3c8158c0-3937-469e-8754-4992578f43ce req-d53a9b69-69eb-4d13-bc20-855df862a032 service nova] Acquiring lock "refresh_cache-e1eec0ce-8df7-402a-b628-5dfdc11949e7" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.113084] env[69328]: DEBUG oslo_concurrency.lockutils [req-3c8158c0-3937-469e-8754-4992578f43ce req-d53a9b69-69eb-4d13-bc20-855df862a032 service nova] Acquired lock "refresh_cache-e1eec0ce-8df7-402a-b628-5dfdc11949e7" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 706.113365] env[69328]: DEBUG nova.network.neutron [req-3c8158c0-3937-469e-8754-4992578f43ce req-d53a9b69-69eb-4d13-bc20-855df862a032 service nova] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Refreshing network info cache for port f159b639-986d-4584-94e0-589e890cb653 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 706.238988] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3272990, 'name': CreateVM_Task, 'duration_secs': 0.39495} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.239261] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 706.240283] env[69328]: DEBUG oslo_concurrency.lockutils [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.240390] env[69328]: DEBUG oslo_concurrency.lockutils [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 706.240826] env[69328]: DEBUG oslo_concurrency.lockutils [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 706.241162] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a376792-e813-4e38-87cb-a8dec5c93b7a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.248244] env[69328]: DEBUG oslo_vmware.api [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Waiting for the task: (returnval){ [ 706.248244] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5240865d-376f-2272-3b99-8cf7ac43ffb8" [ 706.248244] env[69328]: _type = "Task" [ 706.248244] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.256631] env[69328]: DEBUG oslo_vmware.api [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5240865d-376f-2272-3b99-8cf7ac43ffb8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.407278] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a631e20-382e-4b82-8d10-dfe6b5f96176 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.415577] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c109e74-bd6d-4971-a540-c0dce3dd911b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.460209] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fd6a0b5-83fb-4d30-a10d-f53d19401348 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.473281] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9f43305-7239-4132-bddb-dd13eda89843 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.500633] env[69328]: DEBUG oslo_concurrency.lockutils [None req-af64df0c-6b85-49fd-a4d1-e6f242033274 tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 706.501414] env[69328]: DEBUG nova.compute.provider_tree [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 706.527895] env[69328]: INFO nova.compute.manager [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Took 53.42 seconds to build instance. [ 706.585991] env[69328]: DEBUG oslo_vmware.api [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3272991, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079621} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.585991] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 706.585991] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f47b480-603d-40d3-b16d-3d2567179500 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.608105] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Reconfiguring VM instance instance-0000001e to attach disk [datastore1] 8e3a73c1-b622-47f4-99af-71b6dba7c09b/8e3a73c1-b622-47f4-99af-71b6dba7c09b.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 706.609222] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b71d5405-6a09-4420-8e38-1c26295bcd67 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.630603] env[69328]: DEBUG oslo_vmware.api [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for the task: (returnval){ [ 706.630603] env[69328]: value = "task-3272992" [ 706.630603] env[69328]: _type = "Task" [ 706.630603] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.639955] env[69328]: DEBUG oslo_vmware.api [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3272992, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.644198] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquiring lock "25fb207b-9388-4198-bb48-ab7cebd43375" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 706.645027] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Lock "25fb207b-9388-4198-bb48-ab7cebd43375" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 706.759868] env[69328]: DEBUG oslo_vmware.api [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5240865d-376f-2272-3b99-8cf7ac43ffb8, 'name': SearchDatastore_Task, 'duration_secs': 0.022801} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.759868] env[69328]: DEBUG oslo_concurrency.lockutils [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 706.759868] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 706.760213] env[69328]: DEBUG oslo_concurrency.lockutils [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.760575] env[69328]: DEBUG oslo_concurrency.lockutils [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 706.760575] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 
706.760978] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6f0539d4-6f40-4e0e-a20f-7b9884b3b833 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.769133] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 706.769392] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 706.773245] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-836d8b86-2395-4c7e-b427-f640fed10470 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.779516] env[69328]: DEBUG oslo_vmware.api [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Waiting for the task: (returnval){ [ 706.779516] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52548b9f-2093-c601-480d-a5bf93f24932" [ 706.779516] env[69328]: _type = "Task" [ 706.779516] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.787869] env[69328]: DEBUG oslo_vmware.api [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52548b9f-2093-c601-480d-a5bf93f24932, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.974612] env[69328]: DEBUG nova.network.neutron [req-3c8158c0-3937-469e-8754-4992578f43ce req-d53a9b69-69eb-4d13-bc20-855df862a032 service nova] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Updated VIF entry in instance network info cache for port f159b639-986d-4584-94e0-589e890cb653. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 706.974612] env[69328]: DEBUG nova.network.neutron [req-3c8158c0-3937-469e-8754-4992578f43ce req-d53a9b69-69eb-4d13-bc20-855df862a032 service nova] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Updating instance_info_cache with network_info: [{"id": "f159b639-986d-4584-94e0-589e890cb653", "address": "fa:16:3e:78:ba:5e", "network": {"id": "b7b15f77-0584-4f19-a05e-67df3efe1b9d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-778653716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8502178b3d334c338b63dfde3eae8f08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d275d7c6-2a7b-4ee8-b6f4-fabf1ba1905f", "external-id": "nsx-vlan-transportzone-513", "segmentation_id": 513, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf159b639-98", "ovs_interfaceid": "f159b639-986d-4584-94e0-589e890cb653", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.033289] env[69328]: ERROR nova.scheduler.client.report [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [req-151a9caa-9193-4b87-a835-8051946cf2e3] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-151a9caa-9193-4b87-a835-8051946cf2e3"}]} [ 707.033289] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a40713f-84b6-4637-9deb-f349d2897d55 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.644s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 707.052903] env[69328]: DEBUG nova.scheduler.client.report [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Refreshing inventories for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 707.071580] env[69328]: DEBUG nova.scheduler.client.report [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Updating ProviderTree inventory for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 707.071580] env[69328]: DEBUG nova.compute.provider_tree [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 707.086613] env[69328]: DEBUG nova.scheduler.client.report [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Refreshing aggregate associations for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e, aggregates: None {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 707.111527] env[69328]: DEBUG nova.scheduler.client.report [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Refreshing trait associations for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e, traits: 
COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 707.148153] env[69328]: DEBUG oslo_vmware.api [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3272992, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.266120] env[69328]: DEBUG nova.network.neutron [-] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.293455] env[69328]: DEBUG oslo_vmware.api [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52548b9f-2093-c601-480d-a5bf93f24932, 'name': SearchDatastore_Task, 'duration_secs': 0.020661} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.300148] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b44f70cb-d2ca-4e4c-9ae1-b542a12637c5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.307535] env[69328]: DEBUG oslo_vmware.api [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Waiting for the task: (returnval){ [ 707.307535] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d19fb8-b258-625d-3bab-79a240a75301" [ 707.307535] env[69328]: _type = "Task" [ 707.307535] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.322029] env[69328]: DEBUG oslo_vmware.api [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d19fb8-b258-625d-3bab-79a240a75301, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.380085] env[69328]: INFO nova.compute.manager [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Rebuilding instance [ 707.436317] env[69328]: DEBUG nova.compute.manager [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 707.437125] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0e3b707-c8a4-4b58-86b4-3d50c370ca2d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.477277] env[69328]: DEBUG oslo_concurrency.lockutils [req-3c8158c0-3937-469e-8754-4992578f43ce req-d53a9b69-69eb-4d13-bc20-855df862a032 service nova] Releasing lock "refresh_cache-e1eec0ce-8df7-402a-b628-5dfdc11949e7" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 707.477561] env[69328]: DEBUG nova.compute.manager [req-3c8158c0-3937-469e-8754-4992578f43ce req-d53a9b69-69eb-4d13-bc20-855df862a032 service nova] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Received event network-vif-deleted-b4eef5a7-243c-42a1-803f-2405f4009cf7 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 707.542605] env[69328]: DEBUG nova.compute.manager [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 707.643296] env[69328]: DEBUG oslo_vmware.api [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3272992, 'name': ReconfigVM_Task, 'duration_secs': 0.609502} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.645905] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Reconfigured VM instance instance-0000001e to attach disk [datastore1] 8e3a73c1-b622-47f4-99af-71b6dba7c09b/8e3a73c1-b622-47f4-99af-71b6dba7c09b.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 707.647700] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cdcfaa84-af7b-4992-bad3-063f78e04c35 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.653389] env[69328]: DEBUG oslo_vmware.api [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for the task: (returnval){ [ 707.653389] env[69328]: value = "task-3272993" [ 707.653389] env[69328]: _type = "Task" [ 707.653389] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.664371] env[69328]: DEBUG oslo_vmware.api [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3272993, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.741025] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9200ab77-7834-4a5a-bda1-6faaaca46b76 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.748782] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-358a0372-cdc7-4227-8b94-60ce18916b2e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.779190] env[69328]: INFO nova.compute.manager [-] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Took 1.75 seconds to deallocate network for instance. 
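The `Acquiring lock` / `Releasing lock` pairs around the `devstack-image-cache_base` VMDK paths come from oslo.concurrency's lockutils: the driver serializes access to a cached image so only one worker in the process copies or prunes it at a time, releasing the lock once the CopyVirtualDisk_Task has been submitted. A small sketch of that pattern, with `copy_cached_image` as an illustrative placeholder rather than the driver's real helper:

```python
# Sketch of serializing work on a shared image-cache path with
# oslo.concurrency, matching the lock entries around the image-cache VMDKs.
from oslo_concurrency import lockutils


def copy_from_image_cache(cache_vmdk_path, dest_vmdk_path, copy_cached_image):
    # Lock name mirrors the datastore path seen in the log, e.g.
    # "[datastore2] devstack-image-cache_base/<image-id>/<image-id>.vmdk".
    with lockutils.lock(cache_vmdk_path):
        # While held, no other greenthread in this process touches the same
        # cached image; the copy task itself is then polled to completion.
        return copy_cached_image(cache_vmdk_path, dest_vmdk_path)
```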
[ 707.781698] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7c192c6-dc3f-4b4e-a0b9-a850dff7137c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.792324] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6f0f7d0-f709-480b-87c9-d5c58cf83c79 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.807814] env[69328]: DEBUG nova.compute.provider_tree [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 707.818072] env[69328]: DEBUG oslo_vmware.api [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d19fb8-b258-625d-3bab-79a240a75301, 'name': SearchDatastore_Task, 'duration_secs': 0.015353} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.819139] env[69328]: DEBUG oslo_concurrency.lockutils [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 707.819421] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] e1eec0ce-8df7-402a-b628-5dfdc11949e7/e1eec0ce-8df7-402a-b628-5dfdc11949e7.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 707.820172] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-62a44a62-9c93-415d-9d39-80d998b1173a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.827464] env[69328]: DEBUG oslo_vmware.api [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Waiting for the task: (returnval){ [ 707.827464] env[69328]: value = "task-3272994" [ 707.827464] env[69328]: _type = "Task" [ 707.827464] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.835938] env[69328]: DEBUG oslo_vmware.api [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': task-3272994, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.064236] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 708.166061] env[69328]: DEBUG oslo_vmware.api [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3272993, 'name': Rename_Task, 'duration_secs': 0.186983} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.168749] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 708.168749] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9d838322-93ab-4cf6-8b41-96890ba8b251 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.176469] env[69328]: DEBUG oslo_vmware.api [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for the task: (returnval){ [ 708.176469] env[69328]: value = "task-3272995" [ 708.176469] env[69328]: _type = "Task" [ 708.176469] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.184741] env[69328]: DEBUG oslo_vmware.api [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3272995, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.215090] env[69328]: DEBUG nova.compute.manager [req-eec2939a-90a5-4bdc-b3c1-e972574a5335 req-9dd9f3d7-1b43-4ad2-801d-93b052d5002b service nova] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Received event network-vif-deleted-369bd8af-cb0d-49c0-b41e-69689c57cc0a {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 708.289742] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d9988052-6a38-48dd-86f7-6ef56deaba6f tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 708.338881] env[69328]: DEBUG oslo_vmware.api [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': task-3272994, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.353214] env[69328]: DEBUG nova.scheduler.client.report [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Updated inventory for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with generation 59 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 708.353494] env[69328]: DEBUG nova.compute.provider_tree [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Updating resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e generation from 59 to 60 during operation: update_inventory {{(pid=69328) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 708.353681] env[69328]: DEBUG nova.compute.provider_tree [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 708.453364] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Powering off the VM {{(pid=69328) power_off_instance 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 708.453783] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6ef62115-8b3d-43b9-9d03-880194b609f9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.461769] env[69328]: DEBUG oslo_vmware.api [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 708.461769] env[69328]: value = "task-3272996" [ 708.461769] env[69328]: _type = "Task" [ 708.461769] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.471216] env[69328]: DEBUG oslo_vmware.api [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3272996, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.685390] env[69328]: DEBUG oslo_vmware.api [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3272995, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.837733] env[69328]: DEBUG oslo_vmware.api [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': task-3272994, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.624178} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.838061] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] e1eec0ce-8df7-402a-b628-5dfdc11949e7/e1eec0ce-8df7-402a-b628-5dfdc11949e7.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 708.838341] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 708.838630] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a9203e80-84c4-4473-8b2f-cf96f0e84e6e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.845494] env[69328]: DEBUG oslo_vmware.api [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Waiting for the task: (returnval){ [ 708.845494] env[69328]: value = "task-3272997" [ 708.845494] env[69328]: _type = "Task" [ 708.845494] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.854645] env[69328]: DEBUG oslo_vmware.api [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': task-3272997, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.859709] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 4.935s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 708.862274] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.496s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 708.863871] env[69328]: INFO nova.compute.claims [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 708.881030] env[69328]: INFO nova.scheduler.client.report [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Deleted allocations for instance 9753734d-90f0-4661-8029-ec312e88eb60 [ 708.972260] env[69328]: DEBUG oslo_vmware.api [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3272996, 'name': PowerOffVM_Task, 'duration_secs': 0.327012} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.972524] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 708.972749] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 708.973543] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7864aded-59d0-4752-ba27-81991dff78c2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.980167] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 708.980412] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-01dcca5a-bc93-48fd-99a0-91d781cce34f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.041748] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 709.041986] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 709.042161] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Deleting the datastore file [datastore1] b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 709.042423] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-db0f336c-96a2-4daa-b95f-4766a89bf17e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.049438] env[69328]: DEBUG oslo_vmware.api [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 709.049438] env[69328]: value = "task-3272999" [ 709.049438] env[69328]: _type = "Task" [ 709.049438] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.057544] env[69328]: DEBUG oslo_vmware.api [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3272999, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.186198] env[69328]: DEBUG oslo_vmware.api [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3272995, 'name': PowerOnVM_Task, 'duration_secs': 0.753518} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.188463] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 709.188463] env[69328]: INFO nova.compute.manager [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Took 8.57 seconds to spawn the instance on the hypervisor. [ 709.188463] env[69328]: DEBUG nova.compute.manager [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 709.188463] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0446c0a4-e95d-463f-a2e3-59036ba0b025 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.355909] env[69328]: DEBUG oslo_vmware.api [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': task-3272997, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.103988} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.356211] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 709.357034] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef9ceeda-1b70-41ae-aeb5-8c9ba31f97a8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.384066] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Reconfiguring VM instance instance-0000001f to attach disk [datastore2] e1eec0ce-8df7-402a-b628-5dfdc11949e7/e1eec0ce-8df7-402a-b628-5dfdc11949e7.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 709.384066] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d886809f-d09c-46c6-a4e7-0ab8ed2c20eb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.402855] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aa19d4f8-aed4-4937-81d3-93f3297da951 tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Lock "9753734d-90f0-4661-8029-ec312e88eb60" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 43.065s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 709.410466] env[69328]: DEBUG oslo_vmware.api [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Waiting for the task: (returnval){ [ 709.410466] env[69328]: value = "task-3273000" [ 709.410466] env[69328]: _type = "Task" [ 709.410466] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.418366] env[69328]: DEBUG oslo_vmware.api [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': task-3273000, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.560329] env[69328]: DEBUG oslo_vmware.api [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3272999, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.293419} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.560690] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 709.560941] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 709.561194] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 709.705236] env[69328]: INFO nova.compute.manager [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Took 47.26 seconds to build instance. [ 709.803524] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6f170196-6d2e-4f2f-8577-f6832c4cd8bf tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Acquiring lock "676173ee-8001-48c6-bd28-09130f6dd99a" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 709.803819] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6f170196-6d2e-4f2f-8577-f6832c4cd8bf tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Lock "676173ee-8001-48c6-bd28-09130f6dd99a" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 709.804131] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6f170196-6d2e-4f2f-8577-f6832c4cd8bf tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Acquiring lock "676173ee-8001-48c6-bd28-09130f6dd99a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 709.804232] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6f170196-6d2e-4f2f-8577-f6832c4cd8bf tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Lock "676173ee-8001-48c6-bd28-09130f6dd99a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 709.804459] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6f170196-6d2e-4f2f-8577-f6832c4cd8bf tempest-ServersAdminNegativeTestJSON-1126884074 
tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Lock "676173ee-8001-48c6-bd28-09130f6dd99a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 709.807515] env[69328]: INFO nova.compute.manager [None req-6f170196-6d2e-4f2f-8577-f6832c4cd8bf tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Terminating instance [ 709.921267] env[69328]: DEBUG oslo_vmware.api [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': task-3273000, 'name': ReconfigVM_Task, 'duration_secs': 0.333359} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.921563] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Reconfigured VM instance instance-0000001f to attach disk [datastore2] e1eec0ce-8df7-402a-b628-5dfdc11949e7/e1eec0ce-8df7-402a-b628-5dfdc11949e7.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 709.924659] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8240fde2-a48c-45aa-976b-49c127e7aeb1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.931614] env[69328]: DEBUG oslo_vmware.api [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Waiting for the task: (returnval){ [ 709.931614] env[69328]: value = "task-3273001" [ 709.931614] env[69328]: _type = "Task" [ 709.931614] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.940116] env[69328]: DEBUG oslo_vmware.api [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': task-3273001, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.208248] env[69328]: DEBUG oslo_concurrency.lockutils [None req-40a5b8e2-1b1f-43c9-ae5e-6df1db8bd470 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Lock "8e3a73c1-b622-47f4-99af-71b6dba7c09b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 71.763s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 710.311887] env[69328]: DEBUG nova.compute.manager [None req-6f170196-6d2e-4f2f-8577-f6832c4cd8bf tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 710.313073] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6f170196-6d2e-4f2f-8577-f6832c4cd8bf tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 710.313452] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1ff3ae0-4f89-4f61-8e2a-5650661ab70b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.321893] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f170196-6d2e-4f2f-8577-f6832c4cd8bf tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 710.322189] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-87d2ea46-cc98-43df-ba60-32c7d3907971 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.330983] env[69328]: DEBUG oslo_vmware.api [None req-6f170196-6d2e-4f2f-8577-f6832c4cd8bf tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Waiting for the task: (returnval){ [ 710.330983] env[69328]: value = "task-3273002" [ 710.330983] env[69328]: _type = "Task" [ 710.330983] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.339694] env[69328]: DEBUG oslo_vmware.api [None req-6f170196-6d2e-4f2f-8577-f6832c4cd8bf tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': task-3273002, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.376396] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebf61252-c403-4ae1-bbc0-dc95c37498f2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.384214] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6c4a2e1-c0ff-4172-b717-d08bdf5ecd80 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.418387] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1989e8f-8fc1-4332-a945-78cec8db9db4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.426567] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d7bfa01-9251-4e8f-a42e-39f168e25fe4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.445424] env[69328]: DEBUG nova.compute.provider_tree [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 710.449735] env[69328]: DEBUG oslo_vmware.api [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': task-3273001, 'name': Rename_Task, 'duration_secs': 0.167111} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.452475] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 710.452475] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e0f24627-7449-4dcc-9807-a1804d59eef9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.457322] env[69328]: DEBUG oslo_vmware.api [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Waiting for the task: (returnval){ [ 710.457322] env[69328]: value = "task-3273003" [ 710.457322] env[69328]: _type = "Task" [ 710.457322] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.466705] env[69328]: DEBUG oslo_vmware.api [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': task-3273003, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.710919] env[69328]: DEBUG nova.compute.manager [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 710.839977] env[69328]: DEBUG oslo_vmware.api [None req-6f170196-6d2e-4f2f-8577-f6832c4cd8bf tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': task-3273002, 'name': PowerOffVM_Task, 'duration_secs': 0.264617} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.840283] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f170196-6d2e-4f2f-8577-f6832c4cd8bf tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 710.840453] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6f170196-6d2e-4f2f-8577-f6832c4cd8bf tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 710.840740] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fe7563b1-7cb9-45ed-adb3-00e3777979d4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.904968] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6f170196-6d2e-4f2f-8577-f6832c4cd8bf tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 710.905113] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6f170196-6d2e-4f2f-8577-f6832c4cd8bf tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 710.905301] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f170196-6d2e-4f2f-8577-f6832c4cd8bf tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Deleting the datastore file [datastore2] 676173ee-8001-48c6-bd28-09130f6dd99a {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 710.905614] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0d7db8a6-a528-4e24-aee3-7e70103fd9f3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.912708] env[69328]: DEBUG oslo_vmware.api [None req-6f170196-6d2e-4f2f-8577-f6832c4cd8bf tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Waiting for the task: (returnval){ [ 
710.912708] env[69328]: value = "task-3273005" [ 710.912708] env[69328]: _type = "Task" [ 710.912708] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.918541] env[69328]: DEBUG nova.virt.hardware [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=<?>,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-04-03T17:33:40Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 710.918541] env[69328]: DEBUG nova.virt.hardware [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 710.919138] env[69328]: DEBUG nova.virt.hardware [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 710.919138] env[69328]: DEBUG nova.virt.hardware [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 710.919138] env[69328]: DEBUG nova.virt.hardware [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 710.919138] env[69328]: DEBUG nova.virt.hardware [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 710.919416] env[69328]: DEBUG nova.virt.hardware [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 710.919465] env[69328]: DEBUG nova.virt.hardware [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Build topologies 
for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 710.919630] env[69328]: DEBUG nova.virt.hardware [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 710.919786] env[69328]: DEBUG nova.virt.hardware [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 710.919954] env[69328]: DEBUG nova.virt.hardware [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 710.920795] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d549ba9d-0c6f-45f3-9144-fdae9b60adf1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.930155] env[69328]: DEBUG oslo_vmware.api [None req-6f170196-6d2e-4f2f-8577-f6832c4cd8bf tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': task-3273005, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.931489] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f3991ba-ce80-4b46-8700-f044c9a24fe8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.945290] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:59:e3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'abcf0d10-3f3f-45dc-923e-1c78766e2dad', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '67a5c2b8-cfa7-474e-91f4-f5b16fab46ca', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 710.952700] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 710.954667] env[69328]: DEBUG nova.scheduler.client.report [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 710.957884] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 710.960204] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3cb4d846-44c2-4c9c-bc9e-34567b94540d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.991853] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 710.991853] env[69328]: value = "task-3273006" [ 710.991853] env[69328]: _type = "Task" [ 710.991853] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.992142] env[69328]: DEBUG oslo_vmware.api [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': task-3273003, 'name': PowerOnVM_Task} progress is 94%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.002308] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273006, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.234970] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 711.407015] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0636358c-b911-4573-b470-aef12f83ed4f tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "bc9c3a41-7264-4d69-bc15-397b5fa0a8ad" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 711.407511] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0636358c-b911-4573-b470-aef12f83ed4f tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "bc9c3a41-7264-4d69-bc15-397b5fa0a8ad" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 711.424307] env[69328]: DEBUG oslo_vmware.api [None req-6f170196-6d2e-4f2f-8577-f6832c4cd8bf tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Task: {'id': task-3273005, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.237155} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.424562] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f170196-6d2e-4f2f-8577-f6832c4cd8bf tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 711.424757] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6f170196-6d2e-4f2f-8577-f6832c4cd8bf tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 711.424937] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6f170196-6d2e-4f2f-8577-f6832c4cd8bf tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 711.425317] env[69328]: INFO nova.compute.manager [None req-6f170196-6d2e-4f2f-8577-f6832c4cd8bf tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Took 1.11 seconds to destroy the instance on the hypervisor. 
[ 711.425471] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6f170196-6d2e-4f2f-8577-f6832c4cd8bf tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 711.425660] env[69328]: DEBUG nova.compute.manager [-] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 711.425753] env[69328]: DEBUG nova.network.neutron [-] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 711.461701] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.599s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 711.462329] env[69328]: DEBUG nova.compute.manager [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 711.464967] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 32.954s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 711.465181] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 711.465356] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69328) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 711.465656] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.807s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 711.467536] env[69328]: INFO nova.compute.claims [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 711.470935] env[69328]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-840dca84-25a0-46c9-a213-0c97f071c9ae {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.484541] env[69328]: DEBUG nova.compute.manager [None req-efc48e49-d6ac-4e1b-af1c-197154841ab9 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 711.486756] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0149d88-60c6-48f3-abb0-24b1002f3c66 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.497484] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cdca147-a2f6-453c-b381-b720dacaca1a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.500337] env[69328]: DEBUG oslo_vmware.api [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': task-3273003, 'name': PowerOnVM_Task, 'duration_secs': 0.725465} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.504050] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 711.504286] env[69328]: INFO nova.compute.manager [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Took 8.31 seconds to spawn the instance on the hypervisor. [ 711.504466] env[69328]: DEBUG nova.compute.manager [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 711.515181] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f27a241-e5b7-4188-bb1a-e33b87860c89 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.522268] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-902344b8-246e-48f4-977f-234e854968e9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.531023] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273006, 'name': CreateVM_Task, 'duration_secs': 0.413258} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.531655] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 711.532373] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.532532] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 711.532853] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 711.536286] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5bb08a7f-eddc-4c99-9c33-bb62c8af6964 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.541861] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-976264e1-2e6b-4409-9a3b-9bd2adcb65f8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.574929] env[69328]: DEBUG oslo_vmware.api [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 711.574929] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52381b36-4d65-3adc-b034-723a6babc40e" [ 711.574929] env[69328]: _type = "Task" [ 711.574929] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.576579] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179126MB free_disk=116GB free_vcpus=48 pci_devices=None {{(pid=69328) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 711.576726] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 711.587345] env[69328]: DEBUG oslo_vmware.api [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52381b36-4d65-3adc-b034-723a6babc40e, 'name': SearchDatastore_Task, 'duration_secs': 0.016951} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.587666] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 711.587908] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 711.588158] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.588302] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 711.588481] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 711.588765] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-07da4ad2-bae5-47df-ba6a-308464a71190 {{(pid=69328) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.598056] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 711.598253] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 711.599040] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a5a016f-a6dd-4c2a-abc2-74c6b662fbce {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.604688] env[69328]: DEBUG oslo_vmware.api [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 711.604688] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52857c5a-16b3-9d8e-3abd-b8d6d9c3fd70" [ 711.604688] env[69328]: _type = "Task" [ 711.604688] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.613617] env[69328]: DEBUG oslo_vmware.api [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52857c5a-16b3-9d8e-3abd-b8d6d9c3fd70, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.911349] env[69328]: DEBUG nova.compute.utils [None req-0636358c-b911-4573-b470-aef12f83ed4f tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 711.973296] env[69328]: DEBUG nova.compute.utils [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 711.973968] env[69328]: DEBUG nova.compute.manager [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 711.974320] env[69328]: DEBUG nova.network.neutron [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 712.032853] env[69328]: INFO nova.compute.manager [None req-efc48e49-d6ac-4e1b-af1c-197154841ab9 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] instance snapshotting [ 712.035798] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3599f78-e280-41ca-8bdc-5763b8b10104 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.062853] env[69328]: INFO nova.compute.manager [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Took 46.28 seconds to build instance. [ 712.066730] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d08cd0b-7c68-4e6c-918c-1d0b171e5dc8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.073467] env[69328]: DEBUG nova.policy [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5110ae0cc422450ca918d256fe8c1659', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '435a67cec87842678e6c1c354ab09bd7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 712.116456] env[69328]: DEBUG oslo_vmware.api [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52857c5a-16b3-9d8e-3abd-b8d6d9c3fd70, 'name': SearchDatastore_Task, 'duration_secs': 0.013126} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.117535] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e12df6b-8faa-4894-bf9f-cd9e44c97ad0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.124694] env[69328]: DEBUG oslo_vmware.api [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 712.124694] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]523cc20c-da93-543e-90f2-29defc630450" [ 712.124694] env[69328]: _type = "Task" [ 712.124694] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.138063] env[69328]: DEBUG oslo_vmware.api [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]523cc20c-da93-543e-90f2-29defc630450, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.415309] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0636358c-b911-4573-b470-aef12f83ed4f tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "bc9c3a41-7264-4d69-bc15-397b5fa0a8ad" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 712.450905] env[69328]: DEBUG nova.network.neutron [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Successfully created port: 60d09662-fefa-479c-b18f-6c4109ede4e4 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 712.480214] env[69328]: DEBUG nova.compute.manager [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 712.539068] env[69328]: DEBUG nova.compute.manager [req-b9d7ea1d-ef70-4e27-8279-e57bf4c15679 req-5015fe71-048c-4abd-9c54-cf591ad8f865 service nova] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Received event network-vif-deleted-4764934e-430e-4e3b-a834-5bc38771987e {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 712.539068] env[69328]: INFO nova.compute.manager [req-b9d7ea1d-ef70-4e27-8279-e57bf4c15679 req-5015fe71-048c-4abd-9c54-cf591ad8f865 service nova] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Neutron deleted interface 4764934e-430e-4e3b-a834-5bc38771987e; detaching it from the instance and deleting it from the info cache [ 712.539068] env[69328]: DEBUG nova.network.neutron [req-b9d7ea1d-ef70-4e27-8279-e57bf4c15679 req-5015fe71-048c-4abd-9c54-cf591ad8f865 service nova] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.572606] env[69328]: DEBUG nova.network.neutron [-] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.572606] env[69328]: DEBUG oslo_concurrency.lockutils [None req-908a1906-f262-43e4-818c-a05d91dda93b tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Lock "e1eec0ce-8df7-402a-b628-5dfdc11949e7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.851s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 
712.583616] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-efc48e49-d6ac-4e1b-af1c-197154841ab9 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Creating Snapshot of the VM instance {{(pid=69328) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 712.583616] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-98152bec-7441-4417-94e1-1119b967fdbe {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.595537] env[69328]: DEBUG oslo_vmware.api [None req-efc48e49-d6ac-4e1b-af1c-197154841ab9 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for the task: (returnval){ [ 712.595537] env[69328]: value = "task-3273007" [ 712.595537] env[69328]: _type = "Task" [ 712.595537] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.606512] env[69328]: DEBUG oslo_vmware.api [None req-efc48e49-d6ac-4e1b-af1c-197154841ab9 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273007, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.636256] env[69328]: DEBUG oslo_vmware.api [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]523cc20c-da93-543e-90f2-29defc630450, 'name': SearchDatastore_Task, 'duration_secs': 0.011627} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.639202] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 712.639506] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25/b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 712.640084] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f34530cc-464f-4032-a29f-9feef15f0bde {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.646539] env[69328]: DEBUG oslo_vmware.api [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 712.646539] env[69328]: value = "task-3273008" [ 712.646539] env[69328]: _type = "Task" [ 712.646539] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.657012] env[69328]: DEBUG oslo_vmware.api [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273008, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.046911] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c820b03-3152-4e5a-b95a-11e1a6675451 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.050791] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-02cf8046-3498-47e1-a46a-a14fed0e9a4b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.061691] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-538d4617-2e44-42e5-859f-ff1efdb3d19a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.068468] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e377be3-5e9b-4f8e-8380-f65f8afa4be2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.081383] env[69328]: INFO nova.compute.manager [-] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Took 1.66 seconds to deallocate network for instance. 
[ 713.082183] env[69328]: DEBUG nova.compute.manager [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 713.125031] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14fb4f98-a1db-41c0-91ee-85541c51b3f5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.137947] env[69328]: DEBUG nova.compute.manager [req-b9d7ea1d-ef70-4e27-8279-e57bf4c15679 req-5015fe71-048c-4abd-9c54-cf591ad8f865 service nova] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Detach interface failed, port_id=4764934e-430e-4e3b-a834-5bc38771987e, reason: Instance 676173ee-8001-48c6-bd28-09130f6dd99a could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 713.153169] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a4ca834-ee7f-46b2-9abb-7c8b3bc65f60 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.158916] env[69328]: DEBUG oslo_vmware.api [None req-efc48e49-d6ac-4e1b-af1c-197154841ab9 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273007, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.169142] env[69328]: DEBUG oslo_vmware.api [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273008, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.179947] env[69328]: DEBUG nova.compute.provider_tree [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 713.328895] env[69328]: DEBUG oslo_vmware.rw_handles [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ba609a-ed0f-a223-b5a4-0c7b7b167131/disk-0.vmdk. {{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 713.330088] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cb5e986-f472-45b9-b87a-36c1dfbdf432 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.336395] env[69328]: DEBUG oslo_vmware.rw_handles [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ba609a-ed0f-a223-b5a4-0c7b7b167131/disk-0.vmdk is in state: ready. 
{{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 713.336505] env[69328]: ERROR oslo_vmware.rw_handles [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ba609a-ed0f-a223-b5a4-0c7b7b167131/disk-0.vmdk due to incomplete transfer. [ 713.336734] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-305cd671-410b-479b-9ed3-96221af202f1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.343615] env[69328]: DEBUG oslo_vmware.rw_handles [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ba609a-ed0f-a223-b5a4-0c7b7b167131/disk-0.vmdk. {{(pid=69328) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 713.343801] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Uploaded image 80556425-0db6-4f16-b889-eb057a86400c to the Glance image server {{(pid=69328) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 713.345871] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Destroying the VM {{(pid=69328) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 713.346121] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ce5bc71f-e509-4ea4-bf5b-0213d7553134 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.352232] env[69328]: DEBUG oslo_vmware.api [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Waiting for the task: (returnval){ [ 713.352232] env[69328]: value = "task-3273009" [ 713.352232] env[69328]: _type = "Task" [ 713.352232] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.360206] env[69328]: DEBUG oslo_vmware.api [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3273009, 'name': Destroy_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.501775] env[69328]: DEBUG nova.compute.manager [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 713.504395] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0636358c-b911-4573-b470-aef12f83ed4f tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "bc9c3a41-7264-4d69-bc15-397b5fa0a8ad" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 713.504617] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0636358c-b911-4573-b470-aef12f83ed4f tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "bc9c3a41-7264-4d69-bc15-397b5fa0a8ad" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 713.504835] env[69328]: INFO nova.compute.manager [None req-0636358c-b911-4573-b470-aef12f83ed4f tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Attaching volume 99338b52-3801-47a7-ab57-21495a480b27 to /dev/sdb [ 713.526487] env[69328]: DEBUG nova.virt.hardware [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 713.526964] env[69328]: DEBUG nova.virt.hardware [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 713.527053] env[69328]: DEBUG nova.virt.hardware [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 713.527543] env[69328]: DEBUG nova.virt.hardware [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 713.527596] env[69328]: DEBUG nova.virt.hardware [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Image pref 0:0:0 {{(pid=69328) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 713.527835] env[69328]: DEBUG nova.virt.hardware [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 713.528202] env[69328]: DEBUG nova.virt.hardware [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 713.528471] env[69328]: DEBUG nova.virt.hardware [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 713.528741] env[69328]: DEBUG nova.virt.hardware [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 713.528949] env[69328]: DEBUG nova.virt.hardware [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 713.529172] env[69328]: DEBUG nova.virt.hardware [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 713.530139] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b640cfe-5f07-4d45-9cfe-8d17484eb92b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.539394] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b322896-11b6-4a8e-b345-38821675a970 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.545295] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba938a1b-8608-436c-b76c-269d27e1867c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.561321] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6da87d66-5c90-431f-8a14-ca969383fba8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.574766] env[69328]: DEBUG nova.virt.block_device [None req-0636358c-b911-4573-b470-aef12f83ed4f tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] 
Updating existing volume attachment record: f0ccde55-1c43-47cb-8e49-eab3a859c71d {{(pid=69328) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 713.627585] env[69328]: DEBUG oslo_vmware.api [None req-efc48e49-d6ac-4e1b-af1c-197154841ab9 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273007, 'name': CreateSnapshot_Task, 'duration_secs': 0.646406} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.627585] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-efc48e49-d6ac-4e1b-af1c-197154841ab9 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Created Snapshot of the VM instance {{(pid=69328) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 713.628353] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6b15ee7-3c1a-498c-a32b-a41174a72b8d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.640165] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 713.641235] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6f170196-6d2e-4f2f-8577-f6832c4cd8bf tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 713.649427] env[69328]: DEBUG nova.compute.manager [req-c0dfd9e8-0a4c-4bb6-876b-7957c5b4ced9 req-00177e20-96e5-4097-beec-906b694b444c service nova] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Received event network-changed-f159b639-986d-4584-94e0-589e890cb653 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 713.649631] env[69328]: DEBUG nova.compute.manager [req-c0dfd9e8-0a4c-4bb6-876b-7957c5b4ced9 req-00177e20-96e5-4097-beec-906b694b444c service nova] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Refreshing instance network info cache due to event network-changed-f159b639-986d-4584-94e0-589e890cb653. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 713.649854] env[69328]: DEBUG oslo_concurrency.lockutils [req-c0dfd9e8-0a4c-4bb6-876b-7957c5b4ced9 req-00177e20-96e5-4097-beec-906b694b444c service nova] Acquiring lock "refresh_cache-e1eec0ce-8df7-402a-b628-5dfdc11949e7" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 713.649997] env[69328]: DEBUG oslo_concurrency.lockutils [req-c0dfd9e8-0a4c-4bb6-876b-7957c5b4ced9 req-00177e20-96e5-4097-beec-906b694b444c service nova] Acquired lock "refresh_cache-e1eec0ce-8df7-402a-b628-5dfdc11949e7" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 713.650173] env[69328]: DEBUG nova.network.neutron [req-c0dfd9e8-0a4c-4bb6-876b-7957c5b4ced9 req-00177e20-96e5-4097-beec-906b694b444c service nova] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Refreshing network info cache for port f159b639-986d-4584-94e0-589e890cb653 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 713.668084] env[69328]: DEBUG oslo_vmware.api [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273008, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.544835} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.668383] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25/b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 713.668693] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 713.668969] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a50e210d-b794-423b-86e0-487a71e3cb7a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.678673] env[69328]: DEBUG oslo_vmware.api [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 713.678673] env[69328]: value = "task-3273010" [ 713.678673] env[69328]: _type = "Task" [ 713.678673] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.682983] env[69328]: DEBUG nova.scheduler.client.report [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 713.690019] env[69328]: DEBUG oslo_vmware.api [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273010, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.862626] env[69328]: DEBUG oslo_vmware.api [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3273009, 'name': Destroy_Task, 'duration_secs': 0.348651} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.862920] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Destroyed the VM [ 713.863180] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Deleting Snapshot of the VM instance {{(pid=69328) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 713.863438] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-9e3ebca1-0557-463d-be24-b3e9a1dc190d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.869772] env[69328]: DEBUG oslo_vmware.api [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Waiting for the task: (returnval){ [ 713.869772] env[69328]: value = "task-3273012" [ 713.869772] env[69328]: _type = "Task" [ 713.869772] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.878182] env[69328]: DEBUG oslo_vmware.api [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3273012, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.099086] env[69328]: DEBUG nova.network.neutron [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Successfully updated port: 60d09662-fefa-479c-b18f-6c4109ede4e4 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 714.149144] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-efc48e49-d6ac-4e1b-af1c-197154841ab9 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Creating linked-clone VM from snapshot {{(pid=69328) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 714.150240] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-b221a634-0ecc-44df-acd8-121f975fe05a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.162660] env[69328]: DEBUG oslo_vmware.api [None req-efc48e49-d6ac-4e1b-af1c-197154841ab9 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for the task: (returnval){ [ 714.162660] env[69328]: value = "task-3273015" [ 714.162660] env[69328]: _type = "Task" [ 714.162660] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.173482] env[69328]: DEBUG oslo_vmware.api [None req-efc48e49-d6ac-4e1b-af1c-197154841ab9 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273015, 'name': CloneVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.188287] env[69328]: DEBUG oslo_vmware.api [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273010, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066982} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.190980] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 714.191758] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.726s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 714.192242] env[69328]: DEBUG nova.compute.manager [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 714.195347] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ecafdbb-5c3a-489a-bf38-fd3af5771b31 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.198777] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.114s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 714.199761] env[69328]: INFO nova.compute.claims [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 714.230159] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Reconfiguring VM instance instance-0000001d to attach disk [datastore1] b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25/b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 714.230545] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-80cf38ac-3f16-44dd-a775-29f6df54936f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.253159] env[69328]: DEBUG oslo_vmware.api [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 714.253159] env[69328]: value = "task-3273016" [ 714.253159] env[69328]: _type = "Task" [ 714.253159] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.263442] env[69328]: DEBUG oslo_vmware.api [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273016, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.295939] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2f602ab6-90e7-4def-b053-78444d5903e1 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Acquiring lock "e1eec0ce-8df7-402a-b628-5dfdc11949e7" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 714.296398] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2f602ab6-90e7-4def-b053-78444d5903e1 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Lock "e1eec0ce-8df7-402a-b628-5dfdc11949e7" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 714.296808] env[69328]: INFO nova.compute.manager [None req-2f602ab6-90e7-4def-b053-78444d5903e1 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Rebooting instance [ 714.380020] env[69328]: DEBUG oslo_vmware.api [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3273012, 'name': RemoveSnapshot_Task} progress is 30%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.445294] env[69328]: DEBUG nova.network.neutron [req-c0dfd9e8-0a4c-4bb6-876b-7957c5b4ced9 req-00177e20-96e5-4097-beec-906b694b444c service nova] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Updated VIF entry in instance network info cache for port f159b639-986d-4584-94e0-589e890cb653. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 714.445830] env[69328]: DEBUG nova.network.neutron [req-c0dfd9e8-0a4c-4bb6-876b-7957c5b4ced9 req-00177e20-96e5-4097-beec-906b694b444c service nova] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Updating instance_info_cache with network_info: [{"id": "f159b639-986d-4584-94e0-589e890cb653", "address": "fa:16:3e:78:ba:5e", "network": {"id": "b7b15f77-0584-4f19-a05e-67df3efe1b9d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-778653716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8502178b3d334c338b63dfde3eae8f08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d275d7c6-2a7b-4ee8-b6f4-fabf1ba1905f", "external-id": "nsx-vlan-transportzone-513", "segmentation_id": 513, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf159b639-98", "ovs_interfaceid": "f159b639-986d-4584-94e0-589e890cb653", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.601344] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquiring lock "refresh_cache-1e7e9e6e-c084-480c-8653-8441c13d7514" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 714.601685] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquired lock "refresh_cache-1e7e9e6e-c084-480c-8653-8441c13d7514" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 714.601685] env[69328]: DEBUG nova.network.neutron [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 714.628549] env[69328]: DEBUG oslo_concurrency.lockutils [None req-008d5d67-cb82-4672-af1b-6d9c3e4bb77a tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Acquiring lock "bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 714.628678] env[69328]: DEBUG oslo_concurrency.lockutils [None req-008d5d67-cb82-4672-af1b-6d9c3e4bb77a tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Lock "bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 714.628883] env[69328]: DEBUG oslo_concurrency.lockutils [None req-008d5d67-cb82-4672-af1b-6d9c3e4bb77a tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Acquiring lock "bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 714.629089] env[69328]: DEBUG oslo_concurrency.lockutils [None req-008d5d67-cb82-4672-af1b-6d9c3e4bb77a tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Lock "bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 714.629282] env[69328]: DEBUG oslo_concurrency.lockutils [None req-008d5d67-cb82-4672-af1b-6d9c3e4bb77a tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Lock "bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 714.631628] env[69328]: INFO nova.compute.manager [None req-008d5d67-cb82-4672-af1b-6d9c3e4bb77a tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Terminating instance [ 714.674798] env[69328]: DEBUG oslo_vmware.api [None req-efc48e49-d6ac-4e1b-af1c-197154841ab9 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273015, 'name': CloneVM_Task} progress is 94%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.706959] env[69328]: DEBUG nova.compute.utils [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 714.708510] env[69328]: DEBUG nova.compute.manager [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 714.711084] env[69328]: DEBUG nova.network.neutron [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 714.753039] env[69328]: DEBUG nova.compute.manager [req-18eeaa78-436e-492e-9bdf-48b49098c2f4 req-92e131a6-2731-4a5a-8a4a-57176fd4197c service nova] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Received event network-vif-plugged-60d09662-fefa-479c-b18f-6c4109ede4e4 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 714.753039] env[69328]: DEBUG oslo_concurrency.lockutils [req-18eeaa78-436e-492e-9bdf-48b49098c2f4 req-92e131a6-2731-4a5a-8a4a-57176fd4197c service nova] Acquiring lock "1e7e9e6e-c084-480c-8653-8441c13d7514-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 714.753039] env[69328]: DEBUG oslo_concurrency.lockutils [req-18eeaa78-436e-492e-9bdf-48b49098c2f4 req-92e131a6-2731-4a5a-8a4a-57176fd4197c service nova] Lock "1e7e9e6e-c084-480c-8653-8441c13d7514-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 714.753618] env[69328]: DEBUG oslo_concurrency.lockutils [req-18eeaa78-436e-492e-9bdf-48b49098c2f4 req-92e131a6-2731-4a5a-8a4a-57176fd4197c service nova] Lock "1e7e9e6e-c084-480c-8653-8441c13d7514-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 714.755746] env[69328]: DEBUG nova.compute.manager [req-18eeaa78-436e-492e-9bdf-48b49098c2f4 req-92e131a6-2731-4a5a-8a4a-57176fd4197c service nova] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] No waiting events found dispatching network-vif-plugged-60d09662-fefa-479c-b18f-6c4109ede4e4 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 714.755746] env[69328]: WARNING nova.compute.manager [req-18eeaa78-436e-492e-9bdf-48b49098c2f4 req-92e131a6-2731-4a5a-8a4a-57176fd4197c service nova] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Received unexpected event network-vif-plugged-60d09662-fefa-479c-b18f-6c4109ede4e4 for instance with vm_state building and task_state spawning. [ 714.755746] env[69328]: DEBUG nova.compute.manager [req-18eeaa78-436e-492e-9bdf-48b49098c2f4 req-92e131a6-2731-4a5a-8a4a-57176fd4197c service nova] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Received event network-changed-60d09662-fefa-479c-b18f-6c4109ede4e4 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 714.755746] env[69328]: DEBUG nova.compute.manager [req-18eeaa78-436e-492e-9bdf-48b49098c2f4 req-92e131a6-2731-4a5a-8a4a-57176fd4197c service nova] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Refreshing instance network info cache due to event network-changed-60d09662-fefa-479c-b18f-6c4109ede4e4. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 714.755746] env[69328]: DEBUG oslo_concurrency.lockutils [req-18eeaa78-436e-492e-9bdf-48b49098c2f4 req-92e131a6-2731-4a5a-8a4a-57176fd4197c service nova] Acquiring lock "refresh_cache-1e7e9e6e-c084-480c-8653-8441c13d7514" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 714.761089] env[69328]: DEBUG nova.policy [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5110ae0cc422450ca918d256fe8c1659', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '435a67cec87842678e6c1c354ab09bd7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 714.771443] env[69328]: DEBUG oslo_vmware.api [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273016, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.825083] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2f602ab6-90e7-4def-b053-78444d5903e1 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Acquiring lock "refresh_cache-e1eec0ce-8df7-402a-b628-5dfdc11949e7" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 714.881387] env[69328]: DEBUG oslo_vmware.api [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3273012, 'name': RemoveSnapshot_Task, 'duration_secs': 0.59251} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.881387] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Deleted Snapshot of the VM instance {{(pid=69328) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 714.882029] env[69328]: INFO nova.compute.manager [None req-97cf03e4-4e29-43d7-a258-bd6519233b35 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Took 15.24 seconds to snapshot the instance on the hypervisor. 
[ 714.952060] env[69328]: DEBUG oslo_concurrency.lockutils [req-c0dfd9e8-0a4c-4bb6-876b-7957c5b4ced9 req-00177e20-96e5-4097-beec-906b694b444c service nova] Releasing lock "refresh_cache-e1eec0ce-8df7-402a-b628-5dfdc11949e7" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 714.952060] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2f602ab6-90e7-4def-b053-78444d5903e1 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Acquired lock "refresh_cache-e1eec0ce-8df7-402a-b628-5dfdc11949e7" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 714.952060] env[69328]: DEBUG nova.network.neutron [None req-2f602ab6-90e7-4def-b053-78444d5903e1 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 715.093603] env[69328]: DEBUG nova.network.neutron [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Successfully created port: dd70e166-7f6d-4b58-b33d-e1c74a5da1f1 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 715.138290] env[69328]: DEBUG nova.compute.manager [None req-008d5d67-cb82-4672-af1b-6d9c3e4bb77a tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 715.138559] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-008d5d67-cb82-4672-af1b-6d9c3e4bb77a tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 715.140850] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df0e3358-5ac3-4d9b-ae0e-a9934caca5e9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.154576] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-008d5d67-cb82-4672-af1b-6d9c3e4bb77a tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 715.154906] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7e31799a-9cea-48be-b3ea-fa45cffa8b10 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.158641] env[69328]: DEBUG nova.network.neutron [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 715.165769] env[69328]: DEBUG oslo_vmware.api [None req-008d5d67-cb82-4672-af1b-6d9c3e4bb77a tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Waiting for the task: (returnval){ [ 715.165769] env[69328]: value = "task-3273017" [ 715.165769] env[69328]: _type = "Task" [ 715.165769] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.174564] env[69328]: DEBUG oslo_vmware.api [None req-008d5d67-cb82-4672-af1b-6d9c3e4bb77a tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': task-3273017, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.178222] env[69328]: DEBUG oslo_vmware.api [None req-efc48e49-d6ac-4e1b-af1c-197154841ab9 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273015, 'name': CloneVM_Task} progress is 94%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.205225] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Acquiring lock "3b4b6687-fb6d-4bb7-8604-20a3ba706ff3" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 715.205499] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Lock "3b4b6687-fb6d-4bb7-8604-20a3ba706ff3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 715.212628] env[69328]: DEBUG nova.compute.manager [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 715.272183] env[69328]: DEBUG oslo_vmware.api [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273016, 'name': ReconfigVM_Task, 'duration_secs': 0.802444} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.275094] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Reconfigured VM instance instance-0000001d to attach disk [datastore1] b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25/b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 715.276522] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-36fa62f1-4478-4fa2-8d31-3067df25d6fd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.286304] env[69328]: DEBUG oslo_vmware.api [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 715.286304] env[69328]: value = "task-3273018" [ 715.286304] env[69328]: _type = "Task" [ 715.286304] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.303108] env[69328]: DEBUG oslo_vmware.api [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273018, 'name': Rename_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.442156] env[69328]: DEBUG nova.network.neutron [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Updating instance_info_cache with network_info: [{"id": "60d09662-fefa-479c-b18f-6c4109ede4e4", "address": "fa:16:3e:db:29:ec", "network": {"id": "a4231ba3-2b54-4dd9-82bc-772b0823748c", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-322499105-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "435a67cec87842678e6c1c354ab09bd7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "195e328b-e41a-49f5-9e51-546b8ea8ceba", "external-id": "nsx-vlan-transportzone-735", "segmentation_id": 735, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60d09662-fe", "ovs_interfaceid": "60d09662-fefa-479c-b18f-6c4109ede4e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.678281] env[69328]: DEBUG oslo_vmware.api [None req-efc48e49-d6ac-4e1b-af1c-197154841ab9 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273015, 'name': CloneVM_Task} 
progress is 95%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.680486] env[69328]: DEBUG oslo_vmware.api [None req-008d5d67-cb82-4672-af1b-6d9c3e4bb77a tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': task-3273017, 'name': PowerOffVM_Task, 'duration_secs': 0.214621} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.683172] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-008d5d67-cb82-4672-af1b-6d9c3e4bb77a tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 715.683354] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-008d5d67-cb82-4672-af1b-6d9c3e4bb77a tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 715.683796] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3165077e-4fa6-4356-9ace-456ad4dfee1f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.739133] env[69328]: DEBUG nova.network.neutron [None req-2f602ab6-90e7-4def-b053-78444d5903e1 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Updating instance_info_cache with network_info: [{"id": "f159b639-986d-4584-94e0-589e890cb653", "address": "fa:16:3e:78:ba:5e", "network": {"id": "b7b15f77-0584-4f19-a05e-67df3efe1b9d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-778653716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8502178b3d334c338b63dfde3eae8f08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d275d7c6-2a7b-4ee8-b6f4-fabf1ba1905f", "external-id": "nsx-vlan-transportzone-513", "segmentation_id": 513, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf159b639-98", "ovs_interfaceid": "f159b639-986d-4584-94e0-589e890cb653", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.766127] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b48b918-db49-4955-9278-00c552152311 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.774540] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-008d5d67-cb82-4672-af1b-6d9c3e4bb77a 
tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 715.774763] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-008d5d67-cb82-4672-af1b-6d9c3e4bb77a tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 715.775213] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-008d5d67-cb82-4672-af1b-6d9c3e4bb77a tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Deleting the datastore file [datastore2] bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 715.775568] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2a4f4068-dcd9-4ba9-889d-63fba3543519 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.780355] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2d21030-a8a3-4bdc-a719-17c211fe1d2a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.784737] env[69328]: DEBUG oslo_vmware.api [None req-008d5d67-cb82-4672-af1b-6d9c3e4bb77a tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Waiting for the task: (returnval){ [ 715.784737] env[69328]: value = "task-3273020" [ 715.784737] env[69328]: _type = "Task" [ 715.784737] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.816781] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b35a4d62-6d51-4a82-8559-b704ef24866c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.822425] env[69328]: DEBUG oslo_vmware.api [None req-008d5d67-cb82-4672-af1b-6d9c3e4bb77a tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': task-3273020, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.827127] env[69328]: DEBUG oslo_vmware.api [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273018, 'name': Rename_Task, 'duration_secs': 0.26864} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.831628] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 715.831628] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-060ed4c6-f00d-445e-9fc1-221cb0d93b34 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.832372] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-299175a8-67f9-46bd-beb3-a48beb47b9dd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.846478] env[69328]: DEBUG nova.compute.provider_tree [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 715.848910] env[69328]: DEBUG oslo_vmware.api [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 715.848910] env[69328]: value = "task-3273021" [ 715.848910] env[69328]: _type = "Task" [ 715.848910] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.856946] env[69328]: DEBUG oslo_vmware.api [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273021, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.949382] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Releasing lock "refresh_cache-1e7e9e6e-c084-480c-8653-8441c13d7514" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 715.949495] env[69328]: DEBUG nova.compute.manager [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Instance network_info: |[{"id": "60d09662-fefa-479c-b18f-6c4109ede4e4", "address": "fa:16:3e:db:29:ec", "network": {"id": "a4231ba3-2b54-4dd9-82bc-772b0823748c", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-322499105-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "435a67cec87842678e6c1c354ab09bd7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "195e328b-e41a-49f5-9e51-546b8ea8ceba", "external-id": "nsx-vlan-transportzone-735", "segmentation_id": 735, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60d09662-fe", "ovs_interfaceid": "60d09662-fefa-479c-b18f-6c4109ede4e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 715.949746] env[69328]: DEBUG oslo_concurrency.lockutils [req-18eeaa78-436e-492e-9bdf-48b49098c2f4 req-92e131a6-2731-4a5a-8a4a-57176fd4197c service nova] Acquired lock "refresh_cache-1e7e9e6e-c084-480c-8653-8441c13d7514" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 715.949952] env[69328]: DEBUG nova.network.neutron [req-18eeaa78-436e-492e-9bdf-48b49098c2f4 req-92e131a6-2731-4a5a-8a4a-57176fd4197c service nova] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Refreshing network info cache for port 60d09662-fefa-479c-b18f-6c4109ede4e4 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 715.951236] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:db:29:ec', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '195e328b-e41a-49f5-9e51-546b8ea8ceba', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '60d09662-fefa-479c-b18f-6c4109ede4e4', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 715.958944] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 
tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 715.962204] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 715.962719] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-332e0ac2-5b8c-47fa-9496-4ea798a1c537 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.983668] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 715.983668] env[69328]: value = "task-3273022" [ 715.983668] env[69328]: _type = "Task" [ 715.983668] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.003394] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273022, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.182750] env[69328]: DEBUG oslo_vmware.api [None req-efc48e49-d6ac-4e1b-af1c-197154841ab9 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273015, 'name': CloneVM_Task, 'duration_secs': 1.654472} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.183157] env[69328]: INFO nova.virt.vmwareapi.vmops [None req-efc48e49-d6ac-4e1b-af1c-197154841ab9 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Created linked-clone VM from snapshot [ 716.184830] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8bf531b-a8b4-40b8-93c4-8050e0a10b73 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.191889] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-efc48e49-d6ac-4e1b-af1c-197154841ab9 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Uploading image f0fc2ea0-fd94-45a7-a139-94dd13377914 {{(pid=69328) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 716.208088] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-efc48e49-d6ac-4e1b-af1c-197154841ab9 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Destroying the VM {{(pid=69328) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 716.208187] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-4d18a44f-ff94-4dda-a708-96a65fd9b949 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.216640] env[69328]: DEBUG oslo_vmware.api [None req-efc48e49-d6ac-4e1b-af1c-197154841ab9 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for the task: 
(returnval){ [ 716.216640] env[69328]: value = "task-3273024" [ 716.216640] env[69328]: _type = "Task" [ 716.216640] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.223067] env[69328]: DEBUG nova.compute.manager [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 716.231187] env[69328]: DEBUG oslo_vmware.api [None req-efc48e49-d6ac-4e1b-af1c-197154841ab9 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273024, 'name': Destroy_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.232242] env[69328]: DEBUG nova.network.neutron [req-18eeaa78-436e-492e-9bdf-48b49098c2f4 req-92e131a6-2731-4a5a-8a4a-57176fd4197c service nova] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Updated VIF entry in instance network info cache for port 60d09662-fefa-479c-b18f-6c4109ede4e4. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 716.232566] env[69328]: DEBUG nova.network.neutron [req-18eeaa78-436e-492e-9bdf-48b49098c2f4 req-92e131a6-2731-4a5a-8a4a-57176fd4197c service nova] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Updating instance_info_cache with network_info: [{"id": "60d09662-fefa-479c-b18f-6c4109ede4e4", "address": "fa:16:3e:db:29:ec", "network": {"id": "a4231ba3-2b54-4dd9-82bc-772b0823748c", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-322499105-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "435a67cec87842678e6c1c354ab09bd7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "195e328b-e41a-49f5-9e51-546b8ea8ceba", "external-id": "nsx-vlan-transportzone-735", "segmentation_id": 735, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60d09662-fe", "ovs_interfaceid": "60d09662-fefa-479c-b18f-6c4109ede4e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.243610] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2f602ab6-90e7-4def-b053-78444d5903e1 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Releasing lock "refresh_cache-e1eec0ce-8df7-402a-b628-5dfdc11949e7" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 716.255682] env[69328]: DEBUG nova.virt.hardware [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 716.255982] env[69328]: DEBUG nova.virt.hardware [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 716.256159] env[69328]: DEBUG nova.virt.hardware [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 716.256346] env[69328]: DEBUG nova.virt.hardware [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 716.256488] env[69328]: DEBUG nova.virt.hardware [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 716.256630] env[69328]: DEBUG nova.virt.hardware [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 716.256860] env[69328]: DEBUG nova.virt.hardware [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 716.257010] env[69328]: DEBUG nova.virt.hardware [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 716.257182] env[69328]: DEBUG nova.virt.hardware [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 716.257436] env[69328]: DEBUG nova.virt.hardware [None 
req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 716.257501] env[69328]: DEBUG nova.virt.hardware [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 716.258779] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36e4461e-a731-4e3c-adc1-bb3dc8730129 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.270368] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7f9bf51-5835-4e85-a1e5-e1c5a62ca7ed {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.296942] env[69328]: DEBUG oslo_vmware.api [None req-008d5d67-cb82-4672-af1b-6d9c3e4bb77a tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': task-3273020, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.28081} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.297224] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-008d5d67-cb82-4672-af1b-6d9c3e4bb77a tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 716.297418] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-008d5d67-cb82-4672-af1b-6d9c3e4bb77a tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 716.297595] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-008d5d67-cb82-4672-af1b-6d9c3e4bb77a tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 716.297773] env[69328]: INFO nova.compute.manager [None req-008d5d67-cb82-4672-af1b-6d9c3e4bb77a tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Took 1.16 seconds to destroy the instance on the hypervisor. [ 716.298040] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-008d5d67-cb82-4672-af1b-6d9c3e4bb77a tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 716.298244] env[69328]: DEBUG nova.compute.manager [-] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 716.298334] env[69328]: DEBUG nova.network.neutron [-] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 716.365931] env[69328]: DEBUG oslo_vmware.api [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273021, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.385086] env[69328]: ERROR nova.scheduler.client.report [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [req-41915afd-149f-4ea2-887c-2ac1a95ca567] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-41915afd-149f-4ea2-887c-2ac1a95ca567"}]} [ 716.408588] env[69328]: DEBUG nova.scheduler.client.report [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Refreshing inventories for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 716.424097] env[69328]: DEBUG nova.scheduler.client.report [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Updating ProviderTree inventory for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 716.424339] env[69328]: DEBUG nova.compute.provider_tree [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 
512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 716.440589] env[69328]: DEBUG nova.scheduler.client.report [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Refreshing aggregate associations for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e, aggregates: None {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 716.467822] env[69328]: DEBUG nova.scheduler.client.report [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Refreshing trait associations for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 716.494235] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273022, 'name': CreateVM_Task, 'duration_secs': 0.3752} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.494235] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 716.495439] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.495807] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 716.496219] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 716.496684] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aeb2bd43-e240-486c-a82c-af908ba18b3e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.506717] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for the task: (returnval){ [ 716.506717] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52bf5566-b4e5-9220-d80a-0fd5baa8abbc" [ 
716.506717] env[69328]: _type = "Task" [ 716.506717] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.516813] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52bf5566-b4e5-9220-d80a-0fd5baa8abbc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.732759] env[69328]: DEBUG oslo_vmware.api [None req-efc48e49-d6ac-4e1b-af1c-197154841ab9 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273024, 'name': Destroy_Task} progress is 33%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.736424] env[69328]: DEBUG oslo_concurrency.lockutils [req-18eeaa78-436e-492e-9bdf-48b49098c2f4 req-92e131a6-2731-4a5a-8a4a-57176fd4197c service nova] Releasing lock "refresh_cache-1e7e9e6e-c084-480c-8653-8441c13d7514" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 716.751116] env[69328]: DEBUG nova.compute.manager [None req-2f602ab6-90e7-4def-b053-78444d5903e1 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 716.752386] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c6daf56-dcc6-4e3f-84f8-168f2203b543 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.863404] env[69328]: DEBUG oslo_vmware.api [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273021, 'name': PowerOnVM_Task, 'duration_secs': 0.538127} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.863689] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 716.863886] env[69328]: DEBUG nova.compute.manager [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 716.864682] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e9a5745-157d-4112-ae60-a135a9f2eeaa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.017570] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52bf5566-b4e5-9220-d80a-0fd5baa8abbc, 'name': SearchDatastore_Task, 'duration_secs': 0.013847} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.018013] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 717.018380] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 717.018834] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.019120] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 717.019403] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 717.019776] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dc9555b3-81be-43f6-b0f4-008782a86f5f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.032248] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20db7f32-13e8-4c0a-8f35-d4ba35ffdab7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.034501] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 717.034823] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 717.036310] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7eb0bfda-ebb2-436c-a52d-b2cc82a1a87a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.046919] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f33f84a-8050-4884-a75d-43899c715d65 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.051435] env[69328]: DEBUG nova.compute.manager [req-fbb3a290-13d0-4a50-a0be-c924ec1fc0e6 req-76d0b9dd-f476-4ed8-a940-ec3ff88bfd62 service nova] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Received event network-vif-plugged-dd70e166-7f6d-4b58-b33d-e1c74a5da1f1 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 717.051818] env[69328]: DEBUG oslo_concurrency.lockutils [req-fbb3a290-13d0-4a50-a0be-c924ec1fc0e6 req-76d0b9dd-f476-4ed8-a940-ec3ff88bfd62 service nova] Acquiring lock "146a3eef-0971-4f6e-bd24-58b38a1de0ed-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 717.052263] env[69328]: DEBUG oslo_concurrency.lockutils [req-fbb3a290-13d0-4a50-a0be-c924ec1fc0e6 req-76d0b9dd-f476-4ed8-a940-ec3ff88bfd62 service nova] Lock "146a3eef-0971-4f6e-bd24-58b38a1de0ed-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 717.052744] env[69328]: DEBUG oslo_concurrency.lockutils [req-fbb3a290-13d0-4a50-a0be-c924ec1fc0e6 req-76d0b9dd-f476-4ed8-a940-ec3ff88bfd62 service nova] Lock "146a3eef-0971-4f6e-bd24-58b38a1de0ed-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 717.054242] env[69328]: DEBUG nova.compute.manager [req-fbb3a290-13d0-4a50-a0be-c924ec1fc0e6 req-76d0b9dd-f476-4ed8-a940-ec3ff88bfd62 service nova] 
[instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] No waiting events found dispatching network-vif-plugged-dd70e166-7f6d-4b58-b33d-e1c74a5da1f1 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 717.054576] env[69328]: WARNING nova.compute.manager [req-fbb3a290-13d0-4a50-a0be-c924ec1fc0e6 req-76d0b9dd-f476-4ed8-a940-ec3ff88bfd62 service nova] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Received unexpected event network-vif-plugged-dd70e166-7f6d-4b58-b33d-e1c74a5da1f1 for instance with vm_state building and task_state spawning. [ 717.055490] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for the task: (returnval){ [ 717.055490] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52cbf83a-b237-affc-f142-3e5371ae2fe4" [ 717.055490] env[69328]: _type = "Task" [ 717.055490] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.102439] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9401aa6b-0da2-4949-a477-7f2708db3a48 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.107642] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52cbf83a-b237-affc-f142-3e5371ae2fe4, 'name': SearchDatastore_Task, 'duration_secs': 0.010067} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.109324] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-331f517e-c875-418a-967c-ec8678a3316d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.115796] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83a1f850-2407-491f-9ebd-cb4cbe6c19ba {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.123384] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for the task: (returnval){ [ 717.123384] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5253ccd6-d29a-058e-3ae8-5dba51fa2493" [ 717.123384] env[69328]: _type = "Task" [ 717.123384] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.137956] env[69328]: DEBUG nova.compute.provider_tree [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 717.145877] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5253ccd6-d29a-058e-3ae8-5dba51fa2493, 'name': SearchDatastore_Task, 'duration_secs': 0.010193} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.145877] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 717.145877] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 1e7e9e6e-c084-480c-8653-8441c13d7514/1e7e9e6e-c084-480c-8653-8441c13d7514.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 717.145877] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-72292b03-e146-401c-b585-a24b1e6086df {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.149761] env[69328]: DEBUG nova.network.neutron [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Successfully updated port: dd70e166-7f6d-4b58-b33d-e1c74a5da1f1 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 717.153253] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for the task: (returnval){ [ 717.153253] env[69328]: value = "task-3273025" [ 717.153253] env[69328]: _type = "Task" [ 717.153253] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.162928] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3273025, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.226528] env[69328]: DEBUG oslo_vmware.api [None req-efc48e49-d6ac-4e1b-af1c-197154841ab9 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273024, 'name': Destroy_Task, 'duration_secs': 0.707197} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.226804] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-efc48e49-d6ac-4e1b-af1c-197154841ab9 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Destroyed the VM [ 717.227055] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-efc48e49-d6ac-4e1b-af1c-197154841ab9 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Deleting Snapshot of the VM instance {{(pid=69328) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 717.227325] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a3667d23-fbb0-4746-9773-c38cf9daba23 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.233378] env[69328]: DEBUG oslo_vmware.api [None req-efc48e49-d6ac-4e1b-af1c-197154841ab9 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for the task: (returnval){ [ 717.233378] env[69328]: value = "task-3273026" [ 717.233378] env[69328]: _type = "Task" [ 717.233378] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.241595] env[69328]: DEBUG oslo_vmware.api [None req-efc48e49-d6ac-4e1b-af1c-197154841ab9 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273026, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.339781] env[69328]: DEBUG nova.compute.manager [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 717.340799] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6be923a5-ffd2-402e-84ef-ff0efc51a0aa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.393304] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 717.602846] env[69328]: DEBUG nova.network.neutron [-] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.655666] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquiring lock "refresh_cache-146a3eef-0971-4f6e-bd24-58b38a1de0ed" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.655666] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquired lock "refresh_cache-146a3eef-0971-4f6e-bd24-58b38a1de0ed" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 717.655666] env[69328]: DEBUG nova.network.neutron [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 717.664931] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3273025, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.46003} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.665107] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 1e7e9e6e-c084-480c-8653-8441c13d7514/1e7e9e6e-c084-480c-8653-8441c13d7514.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 717.665327] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 717.665703] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-19e7ef37-7631-46b5-b8d6-374eec680cfd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.675797] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for the task: (returnval){ [ 717.675797] env[69328]: value = "task-3273027" [ 717.675797] env[69328]: _type = "Task" [ 717.675797] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.676715] env[69328]: DEBUG nova.scheduler.client.report [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Updated inventory for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with generation 61 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 717.676948] env[69328]: DEBUG nova.compute.provider_tree [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Updating resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e generation from 61 to 62 during operation: update_inventory {{(pid=69328) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 717.677163] env[69328]: DEBUG nova.compute.provider_tree [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 717.691695] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3273027, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.743904] env[69328]: DEBUG oslo_vmware.api [None req-efc48e49-d6ac-4e1b-af1c-197154841ab9 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273026, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.776049] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15c7ffb5-d791-4919-8a6e-7444ff1f7e9c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.783652] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2f602ab6-90e7-4def-b053-78444d5903e1 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Doing hard reboot of VM {{(pid=69328) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 717.783652] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-57aae54e-ba44-4422-bc9a-f697a30cf796 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.789628] env[69328]: DEBUG oslo_vmware.api [None req-2f602ab6-90e7-4def-b053-78444d5903e1 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Waiting for the task: (returnval){ [ 717.789628] env[69328]: value = "task-3273028" [ 717.789628] env[69328]: _type = "Task" [ 717.789628] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.803022] env[69328]: DEBUG oslo_vmware.api [None req-2f602ab6-90e7-4def-b053-78444d5903e1 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': task-3273028, 'name': ResetVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.854816] env[69328]: INFO nova.compute.manager [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] instance snapshotting [ 717.857554] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d4457ab-0860-42f9-83d5-91ae3035337f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.876831] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4036e257-196e-4526-aba5-d21bbc165c0c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.106915] env[69328]: INFO nova.compute.manager [-] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Took 1.81 seconds to deallocate network for instance. [ 718.135817] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-0636358c-b911-4573-b470-aef12f83ed4f tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Volume attach. Driver type: vmdk {{(pid=69328) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 718.136103] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-0636358c-b911-4573-b470-aef12f83ed4f tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653755', 'volume_id': '99338b52-3801-47a7-ab57-21495a480b27', 'name': 'volume-99338b52-3801-47a7-ab57-21495a480b27', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'bc9c3a41-7264-4d69-bc15-397b5fa0a8ad', 'attached_at': '', 'detached_at': '', 'volume_id': '99338b52-3801-47a7-ab57-21495a480b27', 'serial': '99338b52-3801-47a7-ab57-21495a480b27'} {{(pid=69328) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 718.137078] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1393f144-0510-4e18-841b-9c98082d7a29 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.157561] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d0f25dd-9dec-42d5-9fb9-e9cfbfcf9d4c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.188486] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-0636358c-b911-4573-b470-aef12f83ed4f tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Reconfiguring VM instance instance-00000018 to attach disk [datastore1] volume-99338b52-3801-47a7-ab57-21495a480b27/volume-99338b52-3801-47a7-ab57-21495a480b27.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 718.189384] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 
tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.991s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 718.189894] env[69328]: DEBUG nova.compute.manager [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 718.195968] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2b16d110-f203-497c-9859-9a4b07976a61 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.209204] env[69328]: DEBUG nova.network.neutron [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 718.211680] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.809s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 718.213308] env[69328]: INFO nova.compute.claims [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 718.222299] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3273027, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.102645} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.222768] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 718.223167] env[69328]: DEBUG oslo_vmware.api [None req-0636358c-b911-4573-b470-aef12f83ed4f tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 718.223167] env[69328]: value = "task-3273029" [ 718.223167] env[69328]: _type = "Task" [ 718.223167] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.224377] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-291941a6-06a8-45d6-9342-389c89fc67a5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.262494] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Reconfiguring VM instance instance-00000020 to attach disk [datastore1] 1e7e9e6e-c084-480c-8653-8441c13d7514/1e7e9e6e-c084-480c-8653-8441c13d7514.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 718.262928] env[69328]: DEBUG oslo_vmware.api [None req-0636358c-b911-4573-b470-aef12f83ed4f tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273029, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.269253] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3eceec7e-c5fd-44bd-8def-cd3da4ecc1bd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.295493] env[69328]: DEBUG oslo_vmware.api [None req-efc48e49-d6ac-4e1b-af1c-197154841ab9 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273026, 'name': RemoveSnapshot_Task, 'duration_secs': 0.718247} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.295870] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for the task: (returnval){ [ 718.295870] env[69328]: value = "task-3273030" [ 718.295870] env[69328]: _type = "Task" [ 718.295870] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.299276] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-efc48e49-d6ac-4e1b-af1c-197154841ab9 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Deleted Snapshot of the VM instance {{(pid=69328) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 718.310046] env[69328]: DEBUG oslo_vmware.api [None req-2f602ab6-90e7-4def-b053-78444d5903e1 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': task-3273028, 'name': ResetVM_Task, 'duration_secs': 0.097672} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.314243] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2f602ab6-90e7-4def-b053-78444d5903e1 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Did hard reboot of VM {{(pid=69328) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 718.314477] env[69328]: DEBUG nova.compute.manager [None req-2f602ab6-90e7-4def-b053-78444d5903e1 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 718.314846] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3273030, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.315596] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad804896-2934-48ed-b21d-3370d5877b6a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.387498] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Creating Snapshot of the VM instance {{(pid=69328) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 718.388068] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ce57e88d-0c48-423c-8f76-c123de3e3b84 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.395422] env[69328]: DEBUG oslo_vmware.api [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Waiting for the task: (returnval){ [ 718.395422] env[69328]: value = "task-3273031" [ 718.395422] env[69328]: _type = "Task" [ 718.395422] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.403983] env[69328]: DEBUG oslo_vmware.api [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3273031, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.476512] env[69328]: DEBUG nova.network.neutron [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Updating instance_info_cache with network_info: [{"id": "dd70e166-7f6d-4b58-b33d-e1c74a5da1f1", "address": "fa:16:3e:90:9f:87", "network": {"id": "a4231ba3-2b54-4dd9-82bc-772b0823748c", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-322499105-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "435a67cec87842678e6c1c354ab09bd7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "195e328b-e41a-49f5-9e51-546b8ea8ceba", "external-id": "nsx-vlan-transportzone-735", "segmentation_id": 735, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd70e166-7f", "ovs_interfaceid": "dd70e166-7f6d-4b58-b33d-e1c74a5da1f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 718.610349] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6f98f3ad-8c53-48d7-a859-d056e9f43bbd tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 718.610621] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6f98f3ad-8c53-48d7-a859-d056e9f43bbd tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 718.610870] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6f98f3ad-8c53-48d7-a859-d056e9f43bbd tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 718.611104] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6f98f3ad-8c53-48d7-a859-d056e9f43bbd tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 718.611286] env[69328]: DEBUG 
oslo_concurrency.lockutils [None req-6f98f3ad-8c53-48d7-a859-d056e9f43bbd tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 718.614960] env[69328]: INFO nova.compute.manager [None req-6f98f3ad-8c53-48d7-a859-d056e9f43bbd tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Terminating instance [ 718.616938] env[69328]: DEBUG oslo_concurrency.lockutils [None req-008d5d67-cb82-4672-af1b-6d9c3e4bb77a tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 718.712616] env[69328]: DEBUG nova.compute.utils [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 718.713997] env[69328]: DEBUG nova.compute.manager [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 718.714222] env[69328]: DEBUG nova.network.neutron [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 718.738447] env[69328]: DEBUG oslo_vmware.api [None req-0636358c-b911-4573-b470-aef12f83ed4f tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273029, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.795474] env[69328]: DEBUG nova.policy [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a58627ef79fd46f09e894c28ce701fc7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2f5fc8b18f7d496aabcb51075fc4a94b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 718.806836] env[69328]: WARNING nova.compute.manager [None req-efc48e49-d6ac-4e1b-af1c-197154841ab9 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Image not found during snapshot: nova.exception.ImageNotFound: Image f0fc2ea0-fd94-45a7-a139-94dd13377914 could not be found. [ 718.813406] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3273030, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.830835] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2f602ab6-90e7-4def-b053-78444d5903e1 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Lock "e1eec0ce-8df7-402a-b628-5dfdc11949e7" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.534s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 718.906082] env[69328]: DEBUG oslo_vmware.api [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3273031, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.980206] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Releasing lock "refresh_cache-146a3eef-0971-4f6e-bd24-58b38a1de0ed" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 718.980680] env[69328]: DEBUG nova.compute.manager [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Instance network_info: |[{"id": "dd70e166-7f6d-4b58-b33d-e1c74a5da1f1", "address": "fa:16:3e:90:9f:87", "network": {"id": "a4231ba3-2b54-4dd9-82bc-772b0823748c", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-322499105-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "435a67cec87842678e6c1c354ab09bd7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "195e328b-e41a-49f5-9e51-546b8ea8ceba", "external-id": "nsx-vlan-transportzone-735", "segmentation_id": 735, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd70e166-7f", "ovs_interfaceid": "dd70e166-7f6d-4b58-b33d-e1c74a5da1f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 718.981322] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:90:9f:87', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '195e328b-e41a-49f5-9e51-546b8ea8ceba', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dd70e166-7f6d-4b58-b33d-e1c74a5da1f1', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 718.990614] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 718.990860] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 718.991137] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1c364c8b-b039-419b-a007-2476f91f4864 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.014214] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 719.014214] env[69328]: value = "task-3273032" [ 719.014214] env[69328]: _type = "Task" [ 719.014214] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.025156] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273032, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.121792] env[69328]: DEBUG nova.compute.manager [None req-6f98f3ad-8c53-48d7-a859-d056e9f43bbd tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 719.122092] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6f98f3ad-8c53-48d7-a859-d056e9f43bbd tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 719.123087] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe99930f-2faa-4a55-af22-33ca5a2ba8ca {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.134696] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f98f3ad-8c53-48d7-a859-d056e9f43bbd tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 719.136110] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1206154f-0c60-44d3-8233-e899d3072d3a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.145276] env[69328]: DEBUG oslo_vmware.api [None req-6f98f3ad-8c53-48d7-a859-d056e9f43bbd tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 719.145276] env[69328]: value = "task-3273033" [ 719.145276] env[69328]: _type = "Task" [ 719.145276] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.159880] env[69328]: DEBUG oslo_vmware.api [None req-6f98f3ad-8c53-48d7-a859-d056e9f43bbd tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273033, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.174185] env[69328]: DEBUG oslo_concurrency.lockutils [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "d10bee67-6294-4537-9ce7-4eedb8361ddc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 719.174331] env[69328]: DEBUG oslo_concurrency.lockutils [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "d10bee67-6294-4537-9ce7-4eedb8361ddc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 719.193139] env[69328]: DEBUG nova.network.neutron [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Successfully created port: a1ba4448-3cab-4866-81f4-785bd26580b2 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 719.218315] env[69328]: DEBUG nova.compute.manager [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 719.237746] env[69328]: DEBUG oslo_vmware.api [None req-0636358c-b911-4573-b470-aef12f83ed4f tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273029, 'name': ReconfigVM_Task, 'duration_secs': 0.877476} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.238421] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-0636358c-b911-4573-b470-aef12f83ed4f tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Reconfigured VM instance instance-00000018 to attach disk [datastore1] volume-99338b52-3801-47a7-ab57-21495a480b27/volume-99338b52-3801-47a7-ab57-21495a480b27.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 719.245526] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5cabf93e-8d13-4188-b6a2-560b52111bcf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.268728] env[69328]: DEBUG oslo_vmware.api [None req-0636358c-b911-4573-b470-aef12f83ed4f tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 719.268728] env[69328]: value = "task-3273034" [ 719.268728] env[69328]: _type = "Task" [ 719.268728] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.279413] env[69328]: DEBUG oslo_vmware.api [None req-0636358c-b911-4573-b470-aef12f83ed4f tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273034, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.313210] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3273030, 'name': ReconfigVM_Task, 'duration_secs': 0.76909} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.313210] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Reconfigured VM instance instance-00000020 to attach disk [datastore1] 1e7e9e6e-c084-480c-8653-8441c13d7514/1e7e9e6e-c084-480c-8653-8441c13d7514.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 719.317136] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6b527d53-a6f7-432f-ae94-ae373c37f55f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.327200] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for the task: (returnval){ [ 719.327200] env[69328]: value = "task-3273035" [ 719.327200] env[69328]: _type = "Task" [ 719.327200] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.343620] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3273035, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.346440] env[69328]: DEBUG nova.compute.manager [req-38639a3f-a813-4da6-9345-0d40c6318d26 req-20145f02-98a8-4281-9623-51fafc3f460d service nova] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Received event network-vif-deleted-e401a888-b320-4f5f-bcdc-5d8c86b99ce7 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 719.346589] env[69328]: DEBUG nova.compute.manager [req-38639a3f-a813-4da6-9345-0d40c6318d26 req-20145f02-98a8-4281-9623-51fafc3f460d service nova] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Received event network-changed-dd70e166-7f6d-4b58-b33d-e1c74a5da1f1 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 719.346749] env[69328]: DEBUG nova.compute.manager [req-38639a3f-a813-4da6-9345-0d40c6318d26 req-20145f02-98a8-4281-9623-51fafc3f460d service nova] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Refreshing instance network info cache due to event network-changed-dd70e166-7f6d-4b58-b33d-e1c74a5da1f1. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 719.347048] env[69328]: DEBUG oslo_concurrency.lockutils [req-38639a3f-a813-4da6-9345-0d40c6318d26 req-20145f02-98a8-4281-9623-51fafc3f460d service nova] Acquiring lock "refresh_cache-146a3eef-0971-4f6e-bd24-58b38a1de0ed" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.347132] env[69328]: DEBUG oslo_concurrency.lockutils [req-38639a3f-a813-4da6-9345-0d40c6318d26 req-20145f02-98a8-4281-9623-51fafc3f460d service nova] Acquired lock "refresh_cache-146a3eef-0971-4f6e-bd24-58b38a1de0ed" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 719.347290] env[69328]: DEBUG nova.network.neutron [req-38639a3f-a813-4da6-9345-0d40c6318d26 req-20145f02-98a8-4281-9623-51fafc3f460d service nova] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Refreshing network info cache for port dd70e166-7f6d-4b58-b33d-e1c74a5da1f1 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 719.409493] env[69328]: DEBUG oslo_vmware.api [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3273031, 'name': CreateSnapshot_Task, 'duration_secs': 0.948297} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.409725] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Created Snapshot of the VM instance {{(pid=69328) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 719.410931] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7761222d-4e47-4e71-917d-8189f88eba7a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.525890] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273032, 'name': CreateVM_Task, 'duration_secs': 0.382074} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.528820] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 719.531228] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.531467] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 719.531877] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 719.532196] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5641224-dd3e-4ba6-8aaa-e47d1044783d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.537047] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for the task: (returnval){ [ 719.537047] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5297d17d-f748-1dc9-9680-a3c73b572d42" [ 719.537047] env[69328]: _type = "Task" [ 719.537047] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.550106] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5297d17d-f748-1dc9-9680-a3c73b572d42, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.587043] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7594ffc6-3bcd-41fa-867d-b0ee1a8dc8cd tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Acquiring lock "8e3a73c1-b622-47f4-99af-71b6dba7c09b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 719.587336] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7594ffc6-3bcd-41fa-867d-b0ee1a8dc8cd tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Lock "8e3a73c1-b622-47f4-99af-71b6dba7c09b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 719.587735] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7594ffc6-3bcd-41fa-867d-b0ee1a8dc8cd tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Acquiring lock "8e3a73c1-b622-47f4-99af-71b6dba7c09b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 719.587964] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7594ffc6-3bcd-41fa-867d-b0ee1a8dc8cd tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Lock "8e3a73c1-b622-47f4-99af-71b6dba7c09b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 719.588147] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7594ffc6-3bcd-41fa-867d-b0ee1a8dc8cd tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Lock "8e3a73c1-b622-47f4-99af-71b6dba7c09b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 719.590135] env[69328]: INFO nova.compute.manager [None req-7594ffc6-3bcd-41fa-867d-b0ee1a8dc8cd tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Terminating instance [ 719.622610] env[69328]: DEBUG nova.compute.manager [req-8227ba52-5788-4a31-8a96-5223c46c3b07 req-604b957d-5111-4a3e-b519-71ea363362b7 service nova] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Received event network-changed-f159b639-986d-4584-94e0-589e890cb653 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 719.622948] env[69328]: DEBUG nova.compute.manager [req-8227ba52-5788-4a31-8a96-5223c46c3b07 req-604b957d-5111-4a3e-b519-71ea363362b7 service nova] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Refreshing instance network info cache due to event network-changed-f159b639-986d-4584-94e0-589e890cb653. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 719.623615] env[69328]: DEBUG oslo_concurrency.lockutils [req-8227ba52-5788-4a31-8a96-5223c46c3b07 req-604b957d-5111-4a3e-b519-71ea363362b7 service nova] Acquiring lock "refresh_cache-e1eec0ce-8df7-402a-b628-5dfdc11949e7" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.623615] env[69328]: DEBUG oslo_concurrency.lockutils [req-8227ba52-5788-4a31-8a96-5223c46c3b07 req-604b957d-5111-4a3e-b519-71ea363362b7 service nova] Acquired lock "refresh_cache-e1eec0ce-8df7-402a-b628-5dfdc11949e7" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 719.623615] env[69328]: DEBUG nova.network.neutron [req-8227ba52-5788-4a31-8a96-5223c46c3b07 req-604b957d-5111-4a3e-b519-71ea363362b7 service nova] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Refreshing network info cache for port f159b639-986d-4584-94e0-589e890cb653 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 719.658142] env[69328]: DEBUG oslo_vmware.api [None req-6f98f3ad-8c53-48d7-a859-d056e9f43bbd tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273033, 'name': PowerOffVM_Task, 'duration_secs': 0.230052} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.658142] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f98f3ad-8c53-48d7-a859-d056e9f43bbd tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 719.658142] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6f98f3ad-8c53-48d7-a859-d056e9f43bbd tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 719.658142] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6e48b1fc-7996-4001-9c42-f05a8e9abf40 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.779790] env[69328]: DEBUG oslo_vmware.api [None req-0636358c-b911-4573-b470-aef12f83ed4f tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273034, 'name': ReconfigVM_Task, 'duration_secs': 0.173317} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.782434] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-0636358c-b911-4573-b470-aef12f83ed4f tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653755', 'volume_id': '99338b52-3801-47a7-ab57-21495a480b27', 'name': 'volume-99338b52-3801-47a7-ab57-21495a480b27', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'bc9c3a41-7264-4d69-bc15-397b5fa0a8ad', 'attached_at': '', 'detached_at': '', 'volume_id': '99338b52-3801-47a7-ab57-21495a480b27', 'serial': '99338b52-3801-47a7-ab57-21495a480b27'} {{(pid=69328) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 719.795691] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c9e535b-e6b5-4624-8460-da10faa41dd5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.805659] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3363def5-32a9-442f-8055-7a1399750fff {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.844628] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20dfc962-1b4f-4a5c-b05c-22edfe41beb2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.860021] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3273035, 'name': Rename_Task, 'duration_secs': 0.16715} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.860021] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 719.860021] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3fe1a7a4-b714-49a6-a2ea-3fa75d677b2f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.862219] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a6483ad-622a-4bb3-8b9d-00e91a104ac9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.876418] env[69328]: DEBUG nova.compute.provider_tree [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 719.882305] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for the task: (returnval){ [ 719.882305] env[69328]: value = "task-3273037" [ 719.882305] env[69328]: _type = "Task" [ 719.882305] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.893620] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3273037, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.932820] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Creating linked-clone VM from snapshot {{(pid=69328) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 719.934299] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-6e3625bf-4d67-4bc4-98e7-9ce5423ddadd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.945774] env[69328]: DEBUG oslo_vmware.api [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Waiting for the task: (returnval){ [ 719.945774] env[69328]: value = "task-3273038" [ 719.945774] env[69328]: _type = "Task" [ 719.945774] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.954074] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6f98f3ad-8c53-48d7-a859-d056e9f43bbd tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 719.954313] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6f98f3ad-8c53-48d7-a859-d056e9f43bbd tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 719.954493] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f98f3ad-8c53-48d7-a859-d056e9f43bbd tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Deleting the datastore file [datastore1] b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 719.954770] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-79f3859d-b988-4e0f-8f22-7b9c05b838cd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.959743] env[69328]: DEBUG oslo_vmware.api [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3273038, 'name': CloneVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.964307] env[69328]: DEBUG oslo_vmware.api [None req-6f98f3ad-8c53-48d7-a859-d056e9f43bbd tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 719.964307] env[69328]: value = "task-3273039" [ 719.964307] env[69328]: _type = "Task" [ 719.964307] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.974744] env[69328]: DEBUG oslo_vmware.api [None req-6f98f3ad-8c53-48d7-a859-d056e9f43bbd tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273039, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.047722] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5297d17d-f748-1dc9-9680-a3c73b572d42, 'name': SearchDatastore_Task, 'duration_secs': 0.011949} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.048016] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 720.048273] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 720.049256] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 720.049256] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 720.049256] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 720.049256] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4d9bb5ab-3806-4a70-959d-9872e1dc2eb6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.069562] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 720.069791] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 720.070615] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f74a577-af46-43be-9487-670d3bd128ef {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.079642] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for the task: (returnval){ [ 720.079642] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]520cac5f-81fe-6c3f-4f58-4e4796d9d614" [ 720.079642] env[69328]: _type = "Task" [ 720.079642] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.088399] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]520cac5f-81fe-6c3f-4f58-4e4796d9d614, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.096277] env[69328]: DEBUG nova.compute.manager [None req-7594ffc6-3bcd-41fa-867d-b0ee1a8dc8cd tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 720.096522] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7594ffc6-3bcd-41fa-867d-b0ee1a8dc8cd tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 720.097381] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-542031c4-89a1-44e0-a239-6c40ab2eafb7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.104952] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7594ffc6-3bcd-41fa-867d-b0ee1a8dc8cd tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 720.105277] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5b337ffc-4ad5-4043-ae41-3b2e39842157 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.111531] env[69328]: DEBUG oslo_vmware.api [None req-7594ffc6-3bcd-41fa-867d-b0ee1a8dc8cd tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for the task: (returnval){ [ 720.111531] env[69328]: value = "task-3273040" [ 720.111531] env[69328]: _type = "Task" [ 720.111531] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.122626] env[69328]: DEBUG oslo_vmware.api [None req-7594ffc6-3bcd-41fa-867d-b0ee1a8dc8cd tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273040, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.128304] env[69328]: DEBUG nova.network.neutron [req-38639a3f-a813-4da6-9345-0d40c6318d26 req-20145f02-98a8-4281-9623-51fafc3f460d service nova] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Updated VIF entry in instance network info cache for port dd70e166-7f6d-4b58-b33d-e1c74a5da1f1. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 720.128974] env[69328]: DEBUG nova.network.neutron [req-38639a3f-a813-4da6-9345-0d40c6318d26 req-20145f02-98a8-4281-9623-51fafc3f460d service nova] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Updating instance_info_cache with network_info: [{"id": "dd70e166-7f6d-4b58-b33d-e1c74a5da1f1", "address": "fa:16:3e:90:9f:87", "network": {"id": "a4231ba3-2b54-4dd9-82bc-772b0823748c", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-322499105-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "435a67cec87842678e6c1c354ab09bd7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "195e328b-e41a-49f5-9e51-546b8ea8ceba", "external-id": "nsx-vlan-transportzone-735", "segmentation_id": 735, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd70e166-7f", "ovs_interfaceid": "dd70e166-7f6d-4b58-b33d-e1c74a5da1f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.228902] env[69328]: DEBUG nova.compute.manager [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 720.255251] env[69328]: DEBUG nova.virt.hardware [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 720.255593] env[69328]: DEBUG nova.virt.hardware [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 720.255834] env[69328]: DEBUG nova.virt.hardware [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 720.256131] env[69328]: DEBUG nova.virt.hardware [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 720.256345] env[69328]: DEBUG nova.virt.hardware [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 720.256552] env[69328]: DEBUG nova.virt.hardware [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 720.256840] env[69328]: DEBUG nova.virt.hardware [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 720.257080] env[69328]: DEBUG nova.virt.hardware [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 720.257322] env[69328]: DEBUG nova.virt.hardware [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 720.257555] env[69328]: DEBUG nova.virt.hardware [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 720.257818] env[69328]: DEBUG nova.virt.hardware [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 720.259274] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eea23a48-6fa9-4a94-bd24-7a5233f67af8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.274551] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57cadab2-3723-474f-b7ff-d3bef6e2c5ce {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.383559] env[69328]: DEBUG nova.scheduler.client.report [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 720.387790] env[69328]: DEBUG oslo_concurrency.lockutils [None req-57dec915-e838-459b-bd12-33b5b857e7af tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Acquiring lock "e1eec0ce-8df7-402a-b628-5dfdc11949e7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 720.388015] env[69328]: DEBUG oslo_concurrency.lockutils [None req-57dec915-e838-459b-bd12-33b5b857e7af tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Lock "e1eec0ce-8df7-402a-b628-5dfdc11949e7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 720.388220] env[69328]: DEBUG oslo_concurrency.lockutils [None req-57dec915-e838-459b-bd12-33b5b857e7af tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Acquiring lock 
"e1eec0ce-8df7-402a-b628-5dfdc11949e7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 720.388402] env[69328]: DEBUG oslo_concurrency.lockutils [None req-57dec915-e838-459b-bd12-33b5b857e7af tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Lock "e1eec0ce-8df7-402a-b628-5dfdc11949e7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 720.388562] env[69328]: DEBUG oslo_concurrency.lockutils [None req-57dec915-e838-459b-bd12-33b5b857e7af tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Lock "e1eec0ce-8df7-402a-b628-5dfdc11949e7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 720.394477] env[69328]: INFO nova.compute.manager [None req-57dec915-e838-459b-bd12-33b5b857e7af tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Terminating instance [ 720.402559] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3273037, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.415378] env[69328]: DEBUG nova.network.neutron [req-8227ba52-5788-4a31-8a96-5223c46c3b07 req-604b957d-5111-4a3e-b519-71ea363362b7 service nova] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Updated VIF entry in instance network info cache for port f159b639-986d-4584-94e0-589e890cb653. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 720.415759] env[69328]: DEBUG nova.network.neutron [req-8227ba52-5788-4a31-8a96-5223c46c3b07 req-604b957d-5111-4a3e-b519-71ea363362b7 service nova] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Updating instance_info_cache with network_info: [{"id": "f159b639-986d-4584-94e0-589e890cb653", "address": "fa:16:3e:78:ba:5e", "network": {"id": "b7b15f77-0584-4f19-a05e-67df3efe1b9d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-778653716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8502178b3d334c338b63dfde3eae8f08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d275d7c6-2a7b-4ee8-b6f4-fabf1ba1905f", "external-id": "nsx-vlan-transportzone-513", "segmentation_id": 513, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf159b639-98", "ovs_interfaceid": "f159b639-986d-4584-94e0-589e890cb653", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.455712] env[69328]: DEBUG oslo_vmware.api [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3273038, 'name': CloneVM_Task} progress is 94%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.474322] env[69328]: DEBUG oslo_vmware.api [None req-6f98f3ad-8c53-48d7-a859-d056e9f43bbd tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273039, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162454} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.474599] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f98f3ad-8c53-48d7-a859-d056e9f43bbd tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 720.474852] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6f98f3ad-8c53-48d7-a859-d056e9f43bbd tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 720.475107] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6f98f3ad-8c53-48d7-a859-d056e9f43bbd tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 720.475291] env[69328]: INFO nova.compute.manager [None req-6f98f3ad-8c53-48d7-a859-d056e9f43bbd tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Took 1.35 seconds to destroy the instance on the hypervisor. [ 720.475549] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6f98f3ad-8c53-48d7-a859-d056e9f43bbd tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 720.475833] env[69328]: DEBUG nova.compute.manager [-] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 720.475957] env[69328]: DEBUG nova.network.neutron [-] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 720.590710] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]520cac5f-81fe-6c3f-4f58-4e4796d9d614, 'name': SearchDatastore_Task, 'duration_secs': 0.009044} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.591541] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba82fc9c-0c63-4414-bfef-6d81d152639c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.596774] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for the task: (returnval){ [ 720.596774] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f911c6-a6f4-0537-2b5b-f0da3b2e5a24" [ 720.596774] env[69328]: _type = "Task" [ 720.596774] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.605104] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f911c6-a6f4-0537-2b5b-f0da3b2e5a24, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.620213] env[69328]: DEBUG oslo_vmware.api [None req-7594ffc6-3bcd-41fa-867d-b0ee1a8dc8cd tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273040, 'name': PowerOffVM_Task, 'duration_secs': 0.343472} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.620546] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7594ffc6-3bcd-41fa-867d-b0ee1a8dc8cd tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 720.620637] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7594ffc6-3bcd-41fa-867d-b0ee1a8dc8cd tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 720.620939] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1d9ace79-1e78-4a0c-8e03-1805911ec38a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.633875] env[69328]: DEBUG oslo_concurrency.lockutils [req-38639a3f-a813-4da6-9345-0d40c6318d26 req-20145f02-98a8-4281-9623-51fafc3f460d service nova] Releasing lock "refresh_cache-146a3eef-0971-4f6e-bd24-58b38a1de0ed" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 720.686421] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7594ffc6-3bcd-41fa-867d-b0ee1a8dc8cd tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 720.686651] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7594ffc6-3bcd-41fa-867d-b0ee1a8dc8cd tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 720.686829] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-7594ffc6-3bcd-41fa-867d-b0ee1a8dc8cd tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Deleting the datastore file [datastore1] 8e3a73c1-b622-47f4-99af-71b6dba7c09b {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 720.687112] env[69328]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e1edfc5a-247a-4a6d-8b4f-a21819d158bb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.693772] env[69328]: DEBUG oslo_vmware.api [None req-7594ffc6-3bcd-41fa-867d-b0ee1a8dc8cd tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for the task: (returnval){ [ 720.693772] env[69328]: value = "task-3273042" [ 720.693772] env[69328]: _type = "Task" [ 720.693772] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.704746] env[69328]: DEBUG oslo_vmware.api [None req-7594ffc6-3bcd-41fa-867d-b0ee1a8dc8cd tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273042, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.853388] env[69328]: DEBUG nova.objects.instance [None req-0636358c-b911-4573-b470-aef12f83ed4f tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lazy-loading 'flavor' on Instance uuid bc9c3a41-7264-4d69-bc15-397b5fa0a8ad {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 720.891837] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.680s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 720.892418] env[69328]: DEBUG nova.compute.manager [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 720.898669] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.106s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 720.898898] env[69328]: DEBUG nova.objects.instance [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Lazy-loading 'resources' on Instance uuid 84baf472-6eb5-4c92-98eb-e35c14bca4e2 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 720.901195] env[69328]: DEBUG nova.compute.manager [None req-57dec915-e838-459b-bd12-33b5b857e7af tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 720.901409] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-57dec915-e838-459b-bd12-33b5b857e7af tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 720.902620] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-100afd97-3045-4f04-951b-0fd19ca91137 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.908916] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3273037, 'name': PowerOnVM_Task, 'duration_secs': 0.608097} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.909516] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 720.909727] env[69328]: INFO nova.compute.manager [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Took 7.41 seconds to spawn the instance on the hypervisor. [ 720.909904] env[69328]: DEBUG nova.compute.manager [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 720.910920] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6a1e03d-6441-4306-9da2-462f486df11f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.916118] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-57dec915-e838-459b-bd12-33b5b857e7af tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 720.916727] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-07533dba-ef2e-466b-8f33-6d5dc9069308 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.919920] env[69328]: DEBUG oslo_concurrency.lockutils [req-8227ba52-5788-4a31-8a96-5223c46c3b07 req-604b957d-5111-4a3e-b519-71ea363362b7 service nova] Releasing lock "refresh_cache-e1eec0ce-8df7-402a-b628-5dfdc11949e7" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 720.933237] env[69328]: DEBUG oslo_vmware.api [None req-57dec915-e838-459b-bd12-33b5b857e7af tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Waiting for the task: 
(returnval){ [ 720.933237] env[69328]: value = "task-3273043" [ 720.933237] env[69328]: _type = "Task" [ 720.933237] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.944713] env[69328]: DEBUG oslo_vmware.api [None req-57dec915-e838-459b-bd12-33b5b857e7af tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': task-3273043, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.955877] env[69328]: DEBUG oslo_vmware.api [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3273038, 'name': CloneVM_Task} progress is 94%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.112472] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f911c6-a6f4-0537-2b5b-f0da3b2e5a24, 'name': SearchDatastore_Task, 'duration_secs': 0.009928} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.112472] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 721.112472] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 146a3eef-0971-4f6e-bd24-58b38a1de0ed/146a3eef-0971-4f6e-bd24-58b38a1de0ed.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 721.112472] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c862ff1f-a89f-4720-912d-5bda66e1a706 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.122916] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for the task: (returnval){ [ 721.122916] env[69328]: value = "task-3273044" [ 721.122916] env[69328]: _type = "Task" [ 721.122916] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.136993] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3273044, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.207516] env[69328]: DEBUG oslo_vmware.api [None req-7594ffc6-3bcd-41fa-867d-b0ee1a8dc8cd tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273042, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134427} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.208159] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-7594ffc6-3bcd-41fa-867d-b0ee1a8dc8cd tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 721.208159] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7594ffc6-3bcd-41fa-867d-b0ee1a8dc8cd tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 721.208448] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7594ffc6-3bcd-41fa-867d-b0ee1a8dc8cd tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 721.210318] env[69328]: INFO nova.compute.manager [None req-7594ffc6-3bcd-41fa-867d-b0ee1a8dc8cd tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Took 1.11 seconds to destroy the instance on the hypervisor. [ 721.211063] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7594ffc6-3bcd-41fa-867d-b0ee1a8dc8cd tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 721.211063] env[69328]: DEBUG nova.compute.manager [-] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 721.211063] env[69328]: DEBUG nova.network.neutron [-] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 721.275491] env[69328]: DEBUG nova.network.neutron [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Successfully updated port: a1ba4448-3cab-4866-81f4-785bd26580b2 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 721.357670] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0636358c-b911-4573-b470-aef12f83ed4f tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "bc9c3a41-7264-4d69-bc15-397b5fa0a8ad" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.853s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 721.403839] env[69328]: DEBUG nova.compute.utils [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 721.408422] env[69328]: DEBUG nova.compute.manager [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 721.409350] env[69328]: DEBUG nova.network.neutron [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 721.439077] env[69328]: INFO nova.compute.manager [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Took 48.10 seconds to build instance. [ 721.448627] env[69328]: DEBUG oslo_vmware.api [None req-57dec915-e838-459b-bd12-33b5b857e7af tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': task-3273043, 'name': PowerOffVM_Task, 'duration_secs': 0.180531} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.453486] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-57dec915-e838-459b-bd12-33b5b857e7af tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 721.453891] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-57dec915-e838-459b-bd12-33b5b857e7af tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 721.455078] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-43caed5b-0170-4e40-b62a-bc97d44eb698 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.464179] env[69328]: DEBUG oslo_vmware.api [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3273038, 'name': CloneVM_Task, 'duration_secs': 1.274151} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.464602] env[69328]: INFO nova.virt.vmwareapi.vmops [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Created linked-clone VM from snapshot [ 721.465701] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a48a5d13-0ba3-40a5-8e03-0cb1e8ab337f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.474046] env[69328]: DEBUG nova.policy [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2bed96e770e24280a820f4bbb20ead8b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '243d2ced749742d8883cfa7d2ec07725', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 721.482401] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Uploading image e1e4d75f-9329-4173-b193-da66aa2bc3ab {{(pid=69328) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 721.527149] env[69328]: DEBUG oslo_vmware.rw_handles [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 721.527149] env[69328]: value = "vm-653760" [ 721.527149] env[69328]: _type = "VirtualMachine" 
[ 721.527149] env[69328]: }. {{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 721.527558] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-1c1b0e7d-b4c4-4b00-a572-6df1164a0a6f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.533737] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-57dec915-e838-459b-bd12-33b5b857e7af tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 721.533998] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-57dec915-e838-459b-bd12-33b5b857e7af tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 721.534200] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-57dec915-e838-459b-bd12-33b5b857e7af tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Deleting the datastore file [datastore2] e1eec0ce-8df7-402a-b628-5dfdc11949e7 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 721.538222] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6a0c4a02-7226-4b79-a37e-97cac39e15ef {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.549457] env[69328]: DEBUG nova.network.neutron [-] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 721.551306] env[69328]: DEBUG oslo_vmware.rw_handles [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Lease: (returnval){ [ 721.551306] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d4939e-2ca5-b321-bcb1-d3183d8215aa" [ 721.551306] env[69328]: _type = "HttpNfcLease" [ 721.551306] env[69328]: } obtained for exporting VM: (result){ [ 721.551306] env[69328]: value = "vm-653760" [ 721.551306] env[69328]: _type = "VirtualMachine" [ 721.551306] env[69328]: }. {{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 721.551607] env[69328]: DEBUG oslo_vmware.api [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Waiting for the lease: (returnval){ [ 721.551607] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d4939e-2ca5-b321-bcb1-d3183d8215aa" [ 721.551607] env[69328]: _type = "HttpNfcLease" [ 721.551607] env[69328]: } to be ready. 
{{(pid=69328) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 721.555477] env[69328]: DEBUG nova.compute.manager [req-68714aaa-bdd2-4151-8e97-c0b2eaef5edd req-f134abe9-0734-4f49-b100-1279ac6eac9c service nova] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Received event network-vif-plugged-a1ba4448-3cab-4866-81f4-785bd26580b2 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 721.555477] env[69328]: DEBUG oslo_concurrency.lockutils [req-68714aaa-bdd2-4151-8e97-c0b2eaef5edd req-f134abe9-0734-4f49-b100-1279ac6eac9c service nova] Acquiring lock "4c54c0dd-32f1-4d35-b770-3e1a540c54a7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 721.555477] env[69328]: DEBUG oslo_concurrency.lockutils [req-68714aaa-bdd2-4151-8e97-c0b2eaef5edd req-f134abe9-0734-4f49-b100-1279ac6eac9c service nova] Lock "4c54c0dd-32f1-4d35-b770-3e1a540c54a7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 721.555580] env[69328]: DEBUG oslo_concurrency.lockutils [req-68714aaa-bdd2-4151-8e97-c0b2eaef5edd req-f134abe9-0734-4f49-b100-1279ac6eac9c service nova] Lock "4c54c0dd-32f1-4d35-b770-3e1a540c54a7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 721.555732] env[69328]: DEBUG nova.compute.manager [req-68714aaa-bdd2-4151-8e97-c0b2eaef5edd req-f134abe9-0734-4f49-b100-1279ac6eac9c service nova] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] No waiting events found dispatching network-vif-plugged-a1ba4448-3cab-4866-81f4-785bd26580b2 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 721.555907] env[69328]: WARNING nova.compute.manager [req-68714aaa-bdd2-4151-8e97-c0b2eaef5edd req-f134abe9-0734-4f49-b100-1279ac6eac9c service nova] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Received unexpected event network-vif-plugged-a1ba4448-3cab-4866-81f4-785bd26580b2 for instance with vm_state building and task_state spawning. [ 721.556188] env[69328]: DEBUG nova.compute.manager [req-68714aaa-bdd2-4151-8e97-c0b2eaef5edd req-f134abe9-0734-4f49-b100-1279ac6eac9c service nova] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Received event network-changed-a1ba4448-3cab-4866-81f4-785bd26580b2 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 721.556332] env[69328]: DEBUG nova.compute.manager [req-68714aaa-bdd2-4151-8e97-c0b2eaef5edd req-f134abe9-0734-4f49-b100-1279ac6eac9c service nova] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Refreshing instance network info cache due to event network-changed-a1ba4448-3cab-4866-81f4-785bd26580b2. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 721.556524] env[69328]: DEBUG oslo_concurrency.lockutils [req-68714aaa-bdd2-4151-8e97-c0b2eaef5edd req-f134abe9-0734-4f49-b100-1279ac6eac9c service nova] Acquiring lock "refresh_cache-4c54c0dd-32f1-4d35-b770-3e1a540c54a7" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.556663] env[69328]: DEBUG oslo_concurrency.lockutils [req-68714aaa-bdd2-4151-8e97-c0b2eaef5edd req-f134abe9-0734-4f49-b100-1279ac6eac9c service nova] Acquired lock "refresh_cache-4c54c0dd-32f1-4d35-b770-3e1a540c54a7" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 721.556818] env[69328]: DEBUG nova.network.neutron [req-68714aaa-bdd2-4151-8e97-c0b2eaef5edd req-f134abe9-0734-4f49-b100-1279ac6eac9c service nova] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Refreshing network info cache for port a1ba4448-3cab-4866-81f4-785bd26580b2 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 721.564195] env[69328]: DEBUG oslo_vmware.api [None req-57dec915-e838-459b-bd12-33b5b857e7af tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Waiting for the task: (returnval){ [ 721.564195] env[69328]: value = "task-3273047" [ 721.564195] env[69328]: _type = "Task" [ 721.564195] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.571776] env[69328]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 721.571776] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d4939e-2ca5-b321-bcb1-d3183d8215aa" [ 721.571776] env[69328]: _type = "HttpNfcLease" [ 721.571776] env[69328]: } is ready. {{(pid=69328) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 721.587280] env[69328]: DEBUG oslo_vmware.rw_handles [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 721.587280] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d4939e-2ca5-b321-bcb1-d3183d8215aa" [ 721.587280] env[69328]: _type = "HttpNfcLease" [ 721.587280] env[69328]: }. {{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 721.587280] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-897d6cd5-8e6b-477c-8b06-2de5a0adb4bc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.587701] env[69328]: DEBUG oslo_vmware.api [None req-57dec915-e838-459b-bd12-33b5b857e7af tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': task-3273047, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.595678] env[69328]: DEBUG oslo_vmware.rw_handles [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52679d73-e965-e6eb-803b-6ff1bcc7fe65/disk-0.vmdk from lease info. 
{{(pid=69328) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 721.595989] env[69328]: DEBUG oslo_vmware.rw_handles [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52679d73-e965-e6eb-803b-6ff1bcc7fe65/disk-0.vmdk for reading. {{(pid=69328) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 721.674234] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3273044, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.52523} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.674234] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 146a3eef-0971-4f6e-bd24-58b38a1de0ed/146a3eef-0971-4f6e-bd24-58b38a1de0ed.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 721.674234] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 721.674234] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-47e7f8ed-f1a7-4edf-a267-1a7a0768ce5e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.681068] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for the task: (returnval){ [ 721.681068] env[69328]: value = "task-3273048" [ 721.681068] env[69328]: _type = "Task" [ 721.681068] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.693575] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3273048, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.735459] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-eb22aad5-b885-426a-b2c1-375a86d91a69 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.778160] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Acquiring lock "refresh_cache-4c54c0dd-32f1-4d35-b770-3e1a540c54a7" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.823694] env[69328]: DEBUG nova.compute.manager [req-fd6be2b4-4f71-44a4-bbf0-fb32712c929b req-c282ebb0-405b-4621-80e1-d7a7e2ab44f3 service nova] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Received event network-vif-deleted-67a5c2b8-cfa7-474e-91f4-f5b16fab46ca {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 721.824045] env[69328]: DEBUG nova.compute.manager [req-fd6be2b4-4f71-44a4-bbf0-fb32712c929b req-c282ebb0-405b-4621-80e1-d7a7e2ab44f3 service nova] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Received event network-vif-deleted-09f50ba2-a927-40b1-a70f-37f75fbfd5ed {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 721.824353] env[69328]: INFO nova.compute.manager [req-fd6be2b4-4f71-44a4-bbf0-fb32712c929b req-c282ebb0-405b-4621-80e1-d7a7e2ab44f3 service nova] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Neutron deleted interface 09f50ba2-a927-40b1-a70f-37f75fbfd5ed; detaching it from the instance and deleting it from the info cache [ 721.825088] env[69328]: DEBUG nova.network.neutron [req-fd6be2b4-4f71-44a4-bbf0-fb32712c929b req-c282ebb0-405b-4621-80e1-d7a7e2ab44f3 service nova] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 721.915023] env[69328]: DEBUG nova.compute.manager [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 721.942226] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lock "1e7e9e6e-c084-480c-8653-8441c13d7514" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.198s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 722.052373] env[69328]: DEBUG nova.network.neutron [-] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.062676] env[69328]: INFO nova.compute.manager [-] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Took 1.59 seconds to deallocate network for instance. 
[ 722.077171] env[69328]: DEBUG nova.network.neutron [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Successfully created port: c9eb2309-c7e0-43ff-91b1-763055d9381a {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 722.092523] env[69328]: DEBUG oslo_vmware.api [None req-57dec915-e838-459b-bd12-33b5b857e7af tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': task-3273047, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.211057} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.093431] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-57dec915-e838-459b-bd12-33b5b857e7af tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 722.093431] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-57dec915-e838-459b-bd12-33b5b857e7af tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 722.093540] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-57dec915-e838-459b-bd12-33b5b857e7af tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 722.093642] env[69328]: INFO nova.compute.manager [None req-57dec915-e838-459b-bd12-33b5b857e7af tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Took 1.19 seconds to destroy the instance on the hypervisor. [ 722.093835] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-57dec915-e838-459b-bd12-33b5b857e7af tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 722.094031] env[69328]: DEBUG nova.compute.manager [-] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 722.094121] env[69328]: DEBUG nova.network.neutron [-] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 722.120144] env[69328]: DEBUG nova.network.neutron [req-68714aaa-bdd2-4151-8e97-c0b2eaef5edd req-f134abe9-0734-4f49-b100-1279ac6eac9c service nova] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 722.137442] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39dbccfa-bec5-49e1-97a1-0c2e94a8c252 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.149888] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e722205-f256-4214-a94e-8cd7120509bf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.193738] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-978891d8-e673-4083-ae74-7aaf27ced66b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.202801] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3273048, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.152926} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.205292] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 722.207033] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-532dc0e8-8650-4615-b47e-004dbd04e344 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.209837] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-943302f8-8309-434a-acda-5fdd930a935f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.227207] env[69328]: DEBUG nova.compute.provider_tree [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 722.232778] env[69328]: DEBUG nova.network.neutron [req-68714aaa-bdd2-4151-8e97-c0b2eaef5edd req-f134abe9-0734-4f49-b100-1279ac6eac9c service nova] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.249442] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 
146a3eef-0971-4f6e-bd24-58b38a1de0ed] Reconfiguring VM instance instance-00000021 to attach disk [datastore2] 146a3eef-0971-4f6e-bd24-58b38a1de0ed/146a3eef-0971-4f6e-bd24-58b38a1de0ed.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 722.251494] env[69328]: DEBUG oslo_concurrency.lockutils [req-68714aaa-bdd2-4151-8e97-c0b2eaef5edd req-f134abe9-0734-4f49-b100-1279ac6eac9c service nova] Releasing lock "refresh_cache-4c54c0dd-32f1-4d35-b770-3e1a540c54a7" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 722.252833] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-870d2c96-e8d7-43af-8a39-960e6c31b8be {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.267159] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Acquired lock "refresh_cache-4c54c0dd-32f1-4d35-b770-3e1a540c54a7" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 722.267458] env[69328]: DEBUG nova.network.neutron [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 722.275025] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for the task: (returnval){ [ 722.275025] env[69328]: value = "task-3273049" [ 722.275025] env[69328]: _type = "Task" [ 722.275025] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.284797] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3273049, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.328432] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-49a74047-a592-4c49-8a4c-3d46451d4533 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.338585] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcab3031-c0a7-4856-81bc-cb982b89a8fe {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.379254] env[69328]: DEBUG nova.compute.manager [req-fd6be2b4-4f71-44a4-bbf0-fb32712c929b req-c282ebb0-405b-4621-80e1-d7a7e2ab44f3 service nova] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Detach interface failed, port_id=09f50ba2-a927-40b1-a70f-37f75fbfd5ed, reason: Instance 8e3a73c1-b622-47f4-99af-71b6dba7c09b could not be found. 
{{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 722.448370] env[69328]: DEBUG nova.compute.manager [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 722.557553] env[69328]: INFO nova.compute.manager [-] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Took 1.35 seconds to deallocate network for instance. [ 722.573813] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6f98f3ad-8c53-48d7-a859-d056e9f43bbd tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 722.769457] env[69328]: ERROR nova.scheduler.client.report [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] [req-6f7f21af-e27a-4621-99ec-198233219860] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-6f7f21af-e27a-4621-99ec-198233219860"}]} [ 722.785159] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3273049, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.789835] env[69328]: DEBUG nova.scheduler.client.report [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Refreshing inventories for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 722.804757] env[69328]: DEBUG nova.scheduler.client.report [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Updating ProviderTree inventory for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 722.805869] env[69328]: DEBUG nova.compute.provider_tree [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 722.817228] env[69328]: DEBUG nova.scheduler.client.report [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Refreshing aggregate associations for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e, aggregates: None {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 722.820607] env[69328]: DEBUG nova.network.neutron [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 722.839614] env[69328]: DEBUG nova.scheduler.client.report [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Refreshing trait associations for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 722.925222] env[69328]: DEBUG nova.compute.manager [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 722.953613] env[69328]: DEBUG nova.virt.hardware [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 722.954334] env[69328]: DEBUG nova.virt.hardware [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 722.954564] env[69328]: DEBUG nova.virt.hardware [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 722.954806] env[69328]: DEBUG nova.virt.hardware [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 722.954996] env[69328]: DEBUG nova.virt.hardware [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 722.955282] env[69328]: DEBUG nova.virt.hardware [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Chose sockets=0, cores=0, threads=0; limits were 
sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 722.955609] env[69328]: DEBUG nova.virt.hardware [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 722.955856] env[69328]: DEBUG nova.virt.hardware [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 722.956446] env[69328]: DEBUG nova.virt.hardware [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 722.956446] env[69328]: DEBUG nova.virt.hardware [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 722.956792] env[69328]: DEBUG nova.virt.hardware [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 722.960094] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bd27b9c-eb5a-4699-a83d-e59382d0dd6a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.981260] env[69328]: DEBUG oslo_concurrency.lockutils [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 722.983326] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8e62223-2583-49c5-adbd-c0760cfe3d50 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.019426] env[69328]: DEBUG nova.network.neutron [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Updating instance_info_cache with network_info: [{"id": "a1ba4448-3cab-4866-81f4-785bd26580b2", "address": "fa:16:3e:60:e3:c3", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.52", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], 
"routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1ba4448-3c", "ovs_interfaceid": "a1ba4448-3cab-4866-81f4-785bd26580b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 723.069162] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7594ffc6-3bcd-41fa-867d-b0ee1a8dc8cd tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 723.109740] env[69328]: DEBUG nova.network.neutron [-] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 723.157930] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "4d320c76-45bb-451c-8fbb-3dd2d64f56d5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 723.158616] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "4d320c76-45bb-451c-8fbb-3dd2d64f56d5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 723.291268] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3273049, 'name': ReconfigVM_Task, 'duration_secs': 0.63718} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.292029] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Reconfigured VM instance instance-00000021 to attach disk [datastore2] 146a3eef-0971-4f6e-bd24-58b38a1de0ed/146a3eef-0971-4f6e-bd24-58b38a1de0ed.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 723.292789] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-75492946-e20b-4955-827b-42883f91d9b2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.301683] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for the task: (returnval){ [ 723.301683] env[69328]: value = "task-3273050" [ 723.301683] env[69328]: _type = "Task" [ 723.301683] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.315121] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3273050, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.393326] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14d7fa8e-4d96-4457-9aa1-0d188990f049 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.401194] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f99075b-6526-4793-bdca-d41df6a60502 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.432370] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-702846a0-9eac-4038-9b59-e86c929468c7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.440401] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99f0f1c2-8f2d-4e3b-ad7b-a193b05cad47 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.457141] env[69328]: DEBUG nova.compute.provider_tree [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 
723.521074] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Releasing lock "refresh_cache-4c54c0dd-32f1-4d35-b770-3e1a540c54a7" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 723.521565] env[69328]: DEBUG nova.compute.manager [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Instance network_info: |[{"id": "a1ba4448-3cab-4866-81f4-785bd26580b2", "address": "fa:16:3e:60:e3:c3", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.52", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1ba4448-3c", "ovs_interfaceid": "a1ba4448-3cab-4866-81f4-785bd26580b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 723.522167] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:60:e3:c3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a8b99a46-3e7f-4ef1-9e45-58e6cd17f210', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a1ba4448-3cab-4866-81f4-785bd26580b2', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 723.531266] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 723.531969] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 723.532292] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7eeb3c17-5078-42df-9af9-942db6f4fbe1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.552501] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 723.552501] env[69328]: value = "task-3273051" [ 723.552501] env[69328]: _type = "Task" [ 723.552501] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.560852] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273051, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.616642] env[69328]: INFO nova.compute.manager [-] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Took 1.52 seconds to deallocate network for instance. [ 723.645649] env[69328]: DEBUG nova.compute.manager [req-2ea85f58-ff2e-4ed3-adb6-da63454841f0 req-115026b6-fb3d-44e4-a002-84b6ee4c0f77 service nova] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Received event network-vif-deleted-f159b639-986d-4584-94e0-589e890cb653 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 723.813329] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3273050, 'name': Rename_Task, 'duration_secs': 0.240809} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.814027] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 723.814338] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-01d28aa3-4076-495f-bfa4-5d0cee7da068 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.822749] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for the task: (returnval){ [ 723.822749] env[69328]: value = "task-3273052" [ 723.822749] env[69328]: _type = "Task" [ 723.822749] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.831749] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3273052, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.999640] env[69328]: DEBUG nova.scheduler.client.report [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Updated inventory for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with generation 64 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 723.999942] env[69328]: DEBUG nova.compute.provider_tree [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Updating resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e generation from 64 to 65 during operation: update_inventory {{(pid=69328) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 724.000156] env[69328]: DEBUG nova.compute.provider_tree [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 724.017443] env[69328]: DEBUG nova.network.neutron [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Successfully updated port: c9eb2309-c7e0-43ff-91b1-763055d9381a {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 724.063820] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273051, 'name': CreateVM_Task, 'duration_secs': 0.359619} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.064038] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 724.064851] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.065086] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 724.065724] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 724.066029] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63698755-a498-4dcb-b74b-d9c78ae54700 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.071334] env[69328]: DEBUG oslo_vmware.api [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Waiting for the task: (returnval){ [ 724.071334] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5219d4ba-8971-7b8f-c7cd-290e40d05b3e" [ 724.071334] env[69328]: _type = "Task" [ 724.071334] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.079814] env[69328]: DEBUG oslo_vmware.api [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5219d4ba-8971-7b8f-c7cd-290e40d05b3e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.124790] env[69328]: DEBUG oslo_concurrency.lockutils [None req-57dec915-e838-459b-bd12-33b5b857e7af tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 724.334656] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3273052, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.508155] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.609s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 724.511134] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.670s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 724.513241] env[69328]: INFO nova.compute.claims [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 724.522925] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Acquiring lock "refresh_cache-e5d3df12-5334-44c8-9a44-1674e57918bb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.522989] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Acquired lock "refresh_cache-e5d3df12-5334-44c8-9a44-1674e57918bb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 724.523144] env[69328]: DEBUG nova.network.neutron [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 724.529895] env[69328]: INFO nova.scheduler.client.report [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Deleted allocations for instance 84baf472-6eb5-4c92-98eb-e35c14bca4e2 [ 724.584512] env[69328]: DEBUG oslo_vmware.api [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5219d4ba-8971-7b8f-c7cd-290e40d05b3e, 'name': SearchDatastore_Task, 'duration_secs': 0.013843} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.584809] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 724.585070] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 724.585326] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.585503] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 724.585699] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 724.586234] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ceb09409-fa65-4b3a-b6ad-51de848efb54 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.595653] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 724.595816] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 724.596527] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae02166d-c15d-44dd-839e-6c8a1fbb8ce7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.604316] env[69328]: DEBUG oslo_vmware.api [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Waiting for the task: (returnval){ [ 724.604316] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d0de53-b6ce-c4b2-8858-066d8cd7055f" [ 724.604316] env[69328]: _type = "Task" [ 724.604316] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.612805] env[69328]: DEBUG oslo_vmware.api [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d0de53-b6ce-c4b2-8858-066d8cd7055f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.838684] env[69328]: DEBUG oslo_vmware.api [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3273052, 'name': PowerOnVM_Task, 'duration_secs': 0.670142} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.839085] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 724.839346] env[69328]: INFO nova.compute.manager [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Took 8.62 seconds to spawn the instance on the hypervisor. 
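The "Waiting for the task: (returnval){...}", "progress is N%" and "completed successfully" lines throughout this run come from oslo.vmware's task-polling helpers (wait_for_task and _poll_task in oslo_vmware/api.py). The sketch below shows that pattern in its generic form; the vCenter host, credentials and the task reference are placeholders, not values from this deployment.

# Minimal sketch of the poll-until-done pattern visible in the log above.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vcenter.example.org', 'administrator@vsphere.local', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

def run_and_wait(task_ref):
    # Any asynchronous vSphere call (CreateVM_Task, SearchDatastore_Task,
    # PowerOnVM_Task, ...) returns a task managed-object reference.
    # wait_for_task() polls it, logging progress, until it reaches 'success'
    # and raises the corresponding fault if the task ends in 'error'.
    return session.wait_for_task(task_ref)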
[ 724.839576] env[69328]: DEBUG nova.compute.manager [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 724.840478] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d534da99-bcde-49a2-a8dc-496ab492f8ce {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.849382] env[69328]: DEBUG nova.objects.instance [None req-d6c0f9fc-d096-40eb-b1e7-8a646001a644 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Lazy-loading 'flavor' on Instance uuid 46526210-2783-408d-9ecb-773f33ff0c66 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 725.038227] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5f40360e-b9a9-483b-a7da-7a56b4040d64 tempest-ServerDiagnosticsNegativeTest-729250574 tempest-ServerDiagnosticsNegativeTest-729250574-project-member] Lock "84baf472-6eb5-4c92-98eb-e35c14bca4e2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 47.043s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 725.066870] env[69328]: DEBUG nova.network.neutron [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 725.115832] env[69328]: DEBUG oslo_vmware.api [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d0de53-b6ce-c4b2-8858-066d8cd7055f, 'name': SearchDatastore_Task, 'duration_secs': 0.009823} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.116652] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cff3ec16-c0f2-49ee-84ad-b0a1dd6ef3e6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.122202] env[69328]: DEBUG oslo_vmware.api [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Waiting for the task: (returnval){ [ 725.122202] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c613e1-6f33-c9c3-b139-e3dee74f429a" [ 725.122202] env[69328]: _type = "Task" [ 725.122202] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.132800] env[69328]: DEBUG oslo_vmware.api [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c613e1-6f33-c9c3-b139-e3dee74f429a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.266756] env[69328]: DEBUG nova.network.neutron [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Updating instance_info_cache with network_info: [{"id": "c9eb2309-c7e0-43ff-91b1-763055d9381a", "address": "fa:16:3e:b9:56:4a", "network": {"id": "2cd453d6-8f32-4d06-ab12-31048ff144e9", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-441849598-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "243d2ced749742d8883cfa7d2ec07725", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afb671bc-328c-40bf-9c2a-d98695e3d60c", "external-id": "nsx-vlan-transportzone-920", "segmentation_id": 920, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9eb2309-c7", "ovs_interfaceid": "c9eb2309-c7e0-43ff-91b1-763055d9381a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.362655] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d6c0f9fc-d096-40eb-b1e7-8a646001a644 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Acquiring lock "refresh_cache-46526210-2783-408d-9ecb-773f33ff0c66" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.362838] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d6c0f9fc-d096-40eb-b1e7-8a646001a644 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Acquired lock "refresh_cache-46526210-2783-408d-9ecb-773f33ff0c66" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 725.367152] env[69328]: INFO nova.compute.manager [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Took 46.73 seconds to build instance. [ 725.635690] env[69328]: DEBUG oslo_vmware.api [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c613e1-6f33-c9c3-b139-e3dee74f429a, 'name': SearchDatastore_Task, 'duration_secs': 0.011096} completed successfully. 
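The instance_info_cache update above embeds the full network_info model for port c9eb2309-c7e0-43ff-91b1-763055d9381a as JSON. As an illustration of that structure only, here is a short walk over a trimmed copy of the logged data that extracts the port ID, MAC address and fixed IPs; the trimming and the loop are editorial, not Nova code.

# Trimmed copy of the network_info entry logged above, for illustration.
network_info = [{
    "id": "c9eb2309-c7e0-43ff-91b1-763055d9381a",
    "address": "fa:16:3e:b9:56:4a",
    "type": "ovs",
    "devname": "tapc9eb2309-c7",
    "network": {
        "id": "2cd453d6-8f32-4d06-ab12-31048ff144e9",
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "gateway": {"address": "192.168.128.1", "version": 4},
            "ips": [{"address": "192.168.128.10", "type": "fixed",
                     "version": 4, "floating_ips": []}],
        }],
    },
}]

for vif in network_info:
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"] if ip["type"] == "fixed"]
    print(vif["id"], vif["address"], fixed_ips)
    # -> c9eb2309-c7e0-43ff-91b1-763055d9381a fa:16:3e:b9:56:4a ['192.168.128.10']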
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.635992] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 725.636278] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 4c54c0dd-32f1-4d35-b770-3e1a540c54a7/4c54c0dd-32f1-4d35-b770-3e1a540c54a7.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 725.636587] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1c1b1d78-90fa-4b1c-869d-9daccb109f77 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.646585] env[69328]: DEBUG oslo_vmware.api [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Waiting for the task: (returnval){ [ 725.646585] env[69328]: value = "task-3273053" [ 725.646585] env[69328]: _type = "Task" [ 725.646585] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.656300] env[69328]: DEBUG oslo_vmware.api [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': task-3273053, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.686150] env[69328]: DEBUG nova.compute.manager [req-9e8427bf-d46a-445f-abb3-21588c485aaf req-e1871e5e-5f0e-44c0-8931-2f08fab53ad4 service nova] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Received event network-vif-plugged-c9eb2309-c7e0-43ff-91b1-763055d9381a {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 725.686394] env[69328]: DEBUG oslo_concurrency.lockutils [req-9e8427bf-d46a-445f-abb3-21588c485aaf req-e1871e5e-5f0e-44c0-8931-2f08fab53ad4 service nova] Acquiring lock "e5d3df12-5334-44c8-9a44-1674e57918bb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 725.686620] env[69328]: DEBUG oslo_concurrency.lockutils [req-9e8427bf-d46a-445f-abb3-21588c485aaf req-e1871e5e-5f0e-44c0-8931-2f08fab53ad4 service nova] Lock "e5d3df12-5334-44c8-9a44-1674e57918bb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 725.686865] env[69328]: DEBUG oslo_concurrency.lockutils [req-9e8427bf-d46a-445f-abb3-21588c485aaf req-e1871e5e-5f0e-44c0-8931-2f08fab53ad4 service nova] Lock "e5d3df12-5334-44c8-9a44-1674e57918bb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 725.687062] env[69328]: DEBUG nova.compute.manager [req-9e8427bf-d46a-445f-abb3-21588c485aaf req-e1871e5e-5f0e-44c0-8931-2f08fab53ad4 service nova] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] No waiting events found dispatching network-vif-plugged-c9eb2309-c7e0-43ff-91b1-763055d9381a {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 725.687230] env[69328]: WARNING nova.compute.manager [req-9e8427bf-d46a-445f-abb3-21588c485aaf req-e1871e5e-5f0e-44c0-8931-2f08fab53ad4 service nova] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Received unexpected event network-vif-plugged-c9eb2309-c7e0-43ff-91b1-763055d9381a for instance with vm_state building and task_state spawning. [ 725.687407] env[69328]: DEBUG nova.compute.manager [req-9e8427bf-d46a-445f-abb3-21588c485aaf req-e1871e5e-5f0e-44c0-8931-2f08fab53ad4 service nova] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Received event network-changed-c9eb2309-c7e0-43ff-91b1-763055d9381a {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 725.690374] env[69328]: DEBUG nova.compute.manager [req-9e8427bf-d46a-445f-abb3-21588c485aaf req-e1871e5e-5f0e-44c0-8931-2f08fab53ad4 service nova] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Refreshing instance network info cache due to event network-changed-c9eb2309-c7e0-43ff-91b1-763055d9381a. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 725.690374] env[69328]: DEBUG oslo_concurrency.lockutils [req-9e8427bf-d46a-445f-abb3-21588c485aaf req-e1871e5e-5f0e-44c0-8931-2f08fab53ad4 service nova] Acquiring lock "refresh_cache-e5d3df12-5334-44c8-9a44-1674e57918bb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.769897] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Releasing lock "refresh_cache-e5d3df12-5334-44c8-9a44-1674e57918bb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 725.770307] env[69328]: DEBUG nova.compute.manager [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Instance network_info: |[{"id": "c9eb2309-c7e0-43ff-91b1-763055d9381a", "address": "fa:16:3e:b9:56:4a", "network": {"id": "2cd453d6-8f32-4d06-ab12-31048ff144e9", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-441849598-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "243d2ced749742d8883cfa7d2ec07725", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afb671bc-328c-40bf-9c2a-d98695e3d60c", "external-id": "nsx-vlan-transportzone-920", "segmentation_id": 920, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9eb2309-c7", "ovs_interfaceid": "c9eb2309-c7e0-43ff-91b1-763055d9381a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 725.770640] env[69328]: DEBUG oslo_concurrency.lockutils [req-9e8427bf-d46a-445f-abb3-21588c485aaf req-e1871e5e-5f0e-44c0-8931-2f08fab53ad4 service nova] Acquired lock "refresh_cache-e5d3df12-5334-44c8-9a44-1674e57918bb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 725.770876] env[69328]: DEBUG nova.network.neutron [req-9e8427bf-d46a-445f-abb3-21588c485aaf req-e1871e5e-5f0e-44c0-8931-2f08fab53ad4 service nova] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Refreshing network info cache for port c9eb2309-c7e0-43ff-91b1-763055d9381a {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 725.772195] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b9:56:4a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'afb671bc-328c-40bf-9c2a-d98695e3d60c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'c9eb2309-c7e0-43ff-91b1-763055d9381a', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 725.785266] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Creating folder: Project (243d2ced749742d8883cfa7d2ec07725). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 725.790640] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a581b17a-3a3c-4a11-8b92-c35b6b0763cc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.804735] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Created folder: Project (243d2ced749742d8883cfa7d2ec07725) in parent group-v653649. [ 725.805029] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Creating folder: Instances. Parent ref: group-v653762. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 725.805311] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-76638e93-0133-4185-909b-454f829cb940 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.822159] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Created folder: Instances in parent group-v653762. [ 725.822465] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 725.822682] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 725.822933] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-48b47bef-99b1-444e-85d9-3262d5ce29de {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.847623] env[69328]: DEBUG nova.network.neutron [None req-d6c0f9fc-d096-40eb-b1e7-8a646001a644 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 725.850344] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 725.850344] env[69328]: value = "task-3273056" [ 725.850344] env[69328]: _type = "Task" [ 725.850344] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.863947] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273056, 'name': CreateVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.868810] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cc23e0b-bf5c-4646-9cc3-6433a2840dc7 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lock "146a3eef-0971-4f6e-bd24-58b38a1de0ed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 81.096s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 726.158337] env[69328]: DEBUG oslo_vmware.api [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': task-3273053, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.481401} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.158644] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 4c54c0dd-32f1-4d35-b770-3e1a540c54a7/4c54c0dd-32f1-4d35-b770-3e1a540c54a7.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 726.158788] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 726.163788] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0c0be1d5-f013-4798-9b8e-5740ed0ba874 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.170505] env[69328]: DEBUG oslo_vmware.api [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Waiting for the task: (returnval){ [ 726.170505] env[69328]: value = "task-3273057" [ 726.170505] env[69328]: _type = "Task" [ 726.170505] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.181794] env[69328]: DEBUG oslo_vmware.api [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': task-3273057, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.219981] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5af3a1c8-144b-46b2-b8bf-73c12c3620c0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.230681] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2900d743-6789-4b2f-925e-f52a3fde8414 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.265262] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91b417f0-76bb-4a9e-b14b-c255ab1d6d32 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.274137] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ae78293-47f2-4b62-aaed-0fb98264b837 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.291941] env[69328]: DEBUG nova.compute.provider_tree [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 726.367994] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273056, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.372692] env[69328]: DEBUG nova.compute.manager [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 726.686901] env[69328]: DEBUG oslo_vmware.api [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': task-3273057, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074572} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.690349] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 726.693270] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbd0ecb3-bc8f-4592-b038-abd157db0f0d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.715705] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Reconfiguring VM instance instance-00000022 to attach disk [datastore1] 4c54c0dd-32f1-4d35-b770-3e1a540c54a7/4c54c0dd-32f1-4d35-b770-3e1a540c54a7.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 726.715968] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5eecfe89-0a4c-472b-a8cb-f1a24450b2a1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.735660] env[69328]: DEBUG oslo_vmware.api [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Waiting for the task: (returnval){ [ 726.735660] env[69328]: value = "task-3273058" [ 726.735660] env[69328]: _type = "Task" [ 726.735660] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.746362] env[69328]: DEBUG oslo_vmware.api [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': task-3273058, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.754364] env[69328]: DEBUG nova.network.neutron [req-9e8427bf-d46a-445f-abb3-21588c485aaf req-e1871e5e-5f0e-44c0-8931-2f08fab53ad4 service nova] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Updated VIF entry in instance network info cache for port c9eb2309-c7e0-43ff-91b1-763055d9381a. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 726.754712] env[69328]: DEBUG nova.network.neutron [req-9e8427bf-d46a-445f-abb3-21588c485aaf req-e1871e5e-5f0e-44c0-8931-2f08fab53ad4 service nova] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Updating instance_info_cache with network_info: [{"id": "c9eb2309-c7e0-43ff-91b1-763055d9381a", "address": "fa:16:3e:b9:56:4a", "network": {"id": "2cd453d6-8f32-4d06-ab12-31048ff144e9", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-441849598-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "243d2ced749742d8883cfa7d2ec07725", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afb671bc-328c-40bf-9c2a-d98695e3d60c", "external-id": "nsx-vlan-transportzone-920", "segmentation_id": 920, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9eb2309-c7", "ovs_interfaceid": "c9eb2309-c7e0-43ff-91b1-763055d9381a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.791966] env[69328]: DEBUG nova.network.neutron [None req-d6c0f9fc-d096-40eb-b1e7-8a646001a644 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Updating instance_info_cache with network_info: [{"id": "a95af8f2-189b-449d-974d-b380402c6a3f", "address": "fa:16:3e:2d:14:69", "network": {"id": "749d8822-e19e-4761-9e9f-f5717a49481a", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-136341088-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.135", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6895a4954cb4bc89dab40eb3f655606", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa95af8f2-18", "ovs_interfaceid": "a95af8f2-189b-449d-974d-b380402c6a3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.796615] env[69328]: DEBUG nova.scheduler.client.report [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 
tempest-ServerShowV257Test-890624642-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 726.867206] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273056, 'name': CreateVM_Task, 'duration_secs': 0.549052} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.867496] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 726.868485] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.868752] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 726.869230] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 726.869658] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87b312f5-3383-4752-8668-86e9a441eae8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.875901] env[69328]: DEBUG oslo_vmware.api [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Waiting for the task: (returnval){ [ 726.875901] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]529c0ef2-651d-11f8-b376-6b4904a88654" [ 726.875901] env[69328]: _type = "Task" [ 726.875901] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.890833] env[69328]: DEBUG oslo_vmware.api [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]529c0ef2-651d-11f8-b376-6b4904a88654, 'name': SearchDatastore_Task, 'duration_secs': 0.009614} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.891202] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 726.891472] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 726.891749] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.891932] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 726.892185] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 726.892533] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-30e77e06-4d0d-45b9-acb7-bfcc82161648 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.899213] env[69328]: DEBUG oslo_concurrency.lockutils [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.900185] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "55f44102-2891-4b6c-b31e-e8255a24d180" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.900425] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "55f44102-2891-4b6c-b31e-e8255a24d180" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.906621] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 726.906912] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 726.907977] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b479c19-073d-42ac-a1b3-1b4f3d6cb3bc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.913376] env[69328]: DEBUG oslo_vmware.api [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Waiting for the task: (returnval){ [ 726.913376] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52fc6bc5-6807-6baf-8fec-08b9b0dee646" [ 726.913376] env[69328]: _type = "Task" [ 726.913376] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.925509] env[69328]: DEBUG oslo_vmware.api [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52fc6bc5-6807-6baf-8fec-08b9b0dee646, 'name': SearchDatastore_Task, 'duration_secs': 0.009002} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.926286] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2036520-482b-4214-8d0a-718306149dd2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.932019] env[69328]: DEBUG oslo_vmware.api [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Waiting for the task: (returnval){ [ 726.932019] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ed3679-8e6a-4b7c-5091-3259374f6454" [ 726.932019] env[69328]: _type = "Task" [ 726.932019] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.941605] env[69328]: DEBUG oslo_vmware.api [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ed3679-8e6a-4b7c-5091-3259374f6454, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.071515] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3dfbb600-41b4-427f-be91-f569b0800126 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquiring lock "1e7e9e6e-c084-480c-8653-8441c13d7514" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 727.071663] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3dfbb600-41b4-427f-be91-f569b0800126 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lock "1e7e9e6e-c084-480c-8653-8441c13d7514" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 727.072026] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3dfbb600-41b4-427f-be91-f569b0800126 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquiring lock "1e7e9e6e-c084-480c-8653-8441c13d7514-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 727.072231] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3dfbb600-41b4-427f-be91-f569b0800126 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lock "1e7e9e6e-c084-480c-8653-8441c13d7514-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 727.072394] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3dfbb600-41b4-427f-be91-f569b0800126 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lock "1e7e9e6e-c084-480c-8653-8441c13d7514-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 727.075237] env[69328]: INFO nova.compute.manager [None req-3dfbb600-41b4-427f-be91-f569b0800126 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Terminating instance [ 727.249026] env[69328]: DEBUG oslo_vmware.api [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': task-3273058, 'name': ReconfigVM_Task} progress is 99%. 
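The Acquiring/acquired/released lines around "compute_resources", per-instance locks and "-events" locks are emitted by oslo.concurrency's lockutils wrappers, which log how long a caller waited for and held each named lock. A generic, hedged sketch of that usage follows; the function and lock names are examples, not Nova's code.

# Generic illustration of the lockutils pattern behind the
# 'Acquiring lock ... by ...' / 'acquired ... waited Ns' / '"released" ... held Ns'
# DEBUG lines above.  Names are examples only.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage(instance_uuid):
    # Runs with the named in-process lock held; lockutils logs the
    # waited/held durations around this critical section.
    return instance_uuid

# Context-manager form, as used for per-instance and cache-refresh locks:
with lockutils.lock('refresh_cache-<instance-uuid>'):
    pass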
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.259303] env[69328]: DEBUG oslo_concurrency.lockutils [req-9e8427bf-d46a-445f-abb3-21588c485aaf req-e1871e5e-5f0e-44c0-8931-2f08fab53ad4 service nova] Releasing lock "refresh_cache-e5d3df12-5334-44c8-9a44-1674e57918bb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 727.293297] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d6c0f9fc-d096-40eb-b1e7-8a646001a644 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Releasing lock "refresh_cache-46526210-2783-408d-9ecb-773f33ff0c66" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 727.293553] env[69328]: DEBUG nova.compute.manager [None req-d6c0f9fc-d096-40eb-b1e7-8a646001a644 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Inject network info {{(pid=69328) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 727.294396] env[69328]: DEBUG nova.compute.manager [None req-d6c0f9fc-d096-40eb-b1e7-8a646001a644 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] network_info to inject: |[{"id": "a95af8f2-189b-449d-974d-b380402c6a3f", "address": "fa:16:3e:2d:14:69", "network": {"id": "749d8822-e19e-4761-9e9f-f5717a49481a", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-136341088-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.135", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6895a4954cb4bc89dab40eb3f655606", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa95af8f2-18", "ovs_interfaceid": "a95af8f2-189b-449d-974d-b380402c6a3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 727.299097] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d6c0f9fc-d096-40eb-b1e7-8a646001a644 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Reconfiguring VM instance to set the machine id {{(pid=69328) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 727.299533] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5c773a6e-1dff-469d-b477-183362817d64 {{(pid=69328) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.310524] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.800s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 727.311134] env[69328]: DEBUG nova.compute.manager [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 727.314322] env[69328]: DEBUG oslo_concurrency.lockutils [None req-53e9d6fb-ac70-448d-a705-85e6c8f30036 tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.823s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 727.314322] env[69328]: DEBUG nova.objects.instance [None req-53e9d6fb-ac70-448d-a705-85e6c8f30036 tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Lazy-loading 'resources' on Instance uuid c3673531-9167-4d33-b8ce-d6afa5e589bc {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 727.321070] env[69328]: DEBUG oslo_vmware.api [None req-d6c0f9fc-d096-40eb-b1e7-8a646001a644 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Waiting for the task: (returnval){ [ 727.321070] env[69328]: value = "task-3273059" [ 727.321070] env[69328]: _type = "Task" [ 727.321070] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.332544] env[69328]: DEBUG oslo_vmware.api [None req-d6c0f9fc-d096-40eb-b1e7-8a646001a644 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Task: {'id': task-3273059, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.444030] env[69328]: DEBUG oslo_vmware.api [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ed3679-8e6a-4b7c-5091-3259374f6454, 'name': SearchDatastore_Task, 'duration_secs': 0.009509} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.444174] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 727.444444] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] e5d3df12-5334-44c8-9a44-1674e57918bb/e5d3df12-5334-44c8-9a44-1674e57918bb.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 727.444709] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cc1e2f4d-5556-4bfa-936c-95b6ac716ea5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.453071] env[69328]: DEBUG oslo_vmware.api [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Waiting for the task: (returnval){ [ 727.453071] env[69328]: value = "task-3273060" [ 727.453071] env[69328]: _type = "Task" [ 727.453071] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.461036] env[69328]: DEBUG oslo_vmware.api [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Task: {'id': task-3273060, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.497913] env[69328]: DEBUG nova.objects.instance [None req-0ed82f64-0f31-45d2-8136-b76441502164 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Lazy-loading 'flavor' on Instance uuid 46526210-2783-408d-9ecb-773f33ff0c66 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 727.500010] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6f37e08b-f51f-46fa-b487-d83cc96036f5 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquiring lock "146a3eef-0971-4f6e-bd24-58b38a1de0ed" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 727.500243] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6f37e08b-f51f-46fa-b487-d83cc96036f5 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lock "146a3eef-0971-4f6e-bd24-58b38a1de0ed" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 727.500431] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6f37e08b-f51f-46fa-b487-d83cc96036f5 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquiring lock "146a3eef-0971-4f6e-bd24-58b38a1de0ed-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 727.500616] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6f37e08b-f51f-46fa-b487-d83cc96036f5 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lock "146a3eef-0971-4f6e-bd24-58b38a1de0ed-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 727.501541] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6f37e08b-f51f-46fa-b487-d83cc96036f5 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lock "146a3eef-0971-4f6e-bd24-58b38a1de0ed-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 727.503797] env[69328]: INFO nova.compute.manager [None req-6f37e08b-f51f-46fa-b487-d83cc96036f5 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Terminating instance [ 727.579279] env[69328]: DEBUG nova.compute.manager [None req-3dfbb600-41b4-427f-be91-f569b0800126 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 727.579279] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3dfbb600-41b4-427f-be91-f569b0800126 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 727.580153] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2afa8b2-aaa2-4677-84c1-ce0f776ce955 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.588989] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dfbb600-41b4-427f-be91-f569b0800126 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 727.589266] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8e6e5fec-657f-4178-9de0-373c974727e1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.595664] env[69328]: DEBUG oslo_vmware.api [None req-3dfbb600-41b4-427f-be91-f569b0800126 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for the task: (returnval){ [ 727.595664] env[69328]: value = "task-3273061" [ 727.595664] env[69328]: _type = "Task" [ 727.595664] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.606362] env[69328]: DEBUG oslo_vmware.api [None req-3dfbb600-41b4-427f-be91-f569b0800126 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3273061, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.752768] env[69328]: DEBUG oslo_vmware.api [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': task-3273058, 'name': ReconfigVM_Task, 'duration_secs': 0.539115} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.753173] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Reconfigured VM instance instance-00000022 to attach disk [datastore1] 4c54c0dd-32f1-4d35-b770-3e1a540c54a7/4c54c0dd-32f1-4d35-b770-3e1a540c54a7.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 727.753955] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-45d253b7-6903-4c42-af37-0028fd41aae1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.764454] env[69328]: DEBUG oslo_vmware.api [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Waiting for the task: (returnval){ [ 727.764454] env[69328]: value = "task-3273062" [ 727.764454] env[69328]: _type = "Task" [ 727.764454] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.774678] env[69328]: DEBUG oslo_vmware.api [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': task-3273062, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.819261] env[69328]: DEBUG nova.compute.utils [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 727.823719] env[69328]: DEBUG nova.compute.manager [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Not allocating networking since 'none' was specified. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 727.837308] env[69328]: DEBUG oslo_vmware.api [None req-d6c0f9fc-d096-40eb-b1e7-8a646001a644 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Task: {'id': task-3273059, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.962620] env[69328]: DEBUG nova.compute.manager [req-451d3d66-272b-46a1-a567-b847c05024df req-9edef07e-2850-42d7-b8c8-609bfb3f49b1 service nova] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Received event network-changed-a95af8f2-189b-449d-974d-b380402c6a3f {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 727.962855] env[69328]: DEBUG nova.compute.manager [req-451d3d66-272b-46a1-a567-b847c05024df req-9edef07e-2850-42d7-b8c8-609bfb3f49b1 service nova] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Refreshing instance network info cache due to event network-changed-a95af8f2-189b-449d-974d-b380402c6a3f. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 727.963492] env[69328]: DEBUG oslo_concurrency.lockutils [req-451d3d66-272b-46a1-a567-b847c05024df req-9edef07e-2850-42d7-b8c8-609bfb3f49b1 service nova] Acquiring lock "refresh_cache-46526210-2783-408d-9ecb-773f33ff0c66" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.963492] env[69328]: DEBUG oslo_concurrency.lockutils [req-451d3d66-272b-46a1-a567-b847c05024df req-9edef07e-2850-42d7-b8c8-609bfb3f49b1 service nova] Acquired lock "refresh_cache-46526210-2783-408d-9ecb-773f33ff0c66" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 727.963492] env[69328]: DEBUG nova.network.neutron [req-451d3d66-272b-46a1-a567-b847c05024df req-9edef07e-2850-42d7-b8c8-609bfb3f49b1 service nova] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Refreshing network info cache for port a95af8f2-189b-449d-974d-b380402c6a3f {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 727.971901] env[69328]: DEBUG oslo_vmware.api [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Task: {'id': task-3273060, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.474195} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.972929] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] e5d3df12-5334-44c8-9a44-1674e57918bb/e5d3df12-5334-44c8-9a44-1674e57918bb.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 727.973206] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 727.973468] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-52853086-2529-49e9-9de4-707283e6a760 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.983437] env[69328]: DEBUG oslo_vmware.api [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Waiting for the task: (returnval){ [ 727.983437] env[69328]: value = "task-3273063" [ 727.983437] env[69328]: _type = "Task" [ 727.983437] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.995696] env[69328]: DEBUG oslo_vmware.api [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Task: {'id': task-3273063, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.006139] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0ed82f64-0f31-45d2-8136-b76441502164 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Acquiring lock "refresh_cache-46526210-2783-408d-9ecb-773f33ff0c66" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.009698] env[69328]: DEBUG nova.compute.manager [None req-6f37e08b-f51f-46fa-b487-d83cc96036f5 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 728.009893] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6f37e08b-f51f-46fa-b487-d83cc96036f5 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 728.011056] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e5aa9d5-2a32-46fa-b1d2-012c824d4a94 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.018370] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f37e08b-f51f-46fa-b487-d83cc96036f5 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 728.018628] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-23089558-be89-441d-97e1-6369ba92e30d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.026176] env[69328]: DEBUG oslo_vmware.api [None req-6f37e08b-f51f-46fa-b487-d83cc96036f5 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for the task: (returnval){ [ 728.026176] env[69328]: value = "task-3273064" [ 728.026176] env[69328]: _type = "Task" [ 728.026176] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.034826] env[69328]: DEBUG oslo_vmware.api [None req-6f37e08b-f51f-46fa-b487-d83cc96036f5 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3273064, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.107938] env[69328]: DEBUG oslo_vmware.api [None req-3dfbb600-41b4-427f-be91-f569b0800126 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3273061, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.275834] env[69328]: DEBUG oslo_vmware.api [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': task-3273062, 'name': Rename_Task, 'duration_secs': 0.230093} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.276133] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 728.276388] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-888f237c-02ad-4144-887d-83aafa868a97 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.282665] env[69328]: DEBUG oslo_vmware.api [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Waiting for the task: (returnval){ [ 728.282665] env[69328]: value = "task-3273065" [ 728.282665] env[69328]: _type = "Task" [ 728.282665] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.291086] env[69328]: DEBUG oslo_vmware.api [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': task-3273065, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.326207] env[69328]: DEBUG nova.compute.manager [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 728.340336] env[69328]: DEBUG oslo_vmware.api [None req-d6c0f9fc-d096-40eb-b1e7-8a646001a644 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Task: {'id': task-3273059, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.454666] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6841661f-6008-499a-bee8-5c1aa3249f23 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.466072] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d96a51b-8742-42e7-887d-63f95326bdb7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.499893] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d485eb8e-02dc-4ab5-9fd6-0f3d6521bd1f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.509027] env[69328]: DEBUG oslo_vmware.api [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Task: {'id': task-3273063, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090702} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.511438] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 728.513902] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31be9f65-95a9-4f27-9057-ec0aaccfd53c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.516628] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91d5d1c9-b43a-4983-9306-10edbb517944 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.530811] env[69328]: DEBUG nova.compute.provider_tree [None req-53e9d6fb-ac70-448d-a705-85e6c8f30036 tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 728.553283] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Reconfiguring VM instance instance-00000023 to attach disk [datastore1] e5d3df12-5334-44c8-9a44-1674e57918bb/e5d3df12-5334-44c8-9a44-1674e57918bb.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 728.555188] env[69328]: DEBUG nova.scheduler.client.report [None req-53e9d6fb-ac70-448d-a705-85e6c8f30036 tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 
16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 728.558497] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9522b53c-abc1-4a7f-bc9e-a75bbe801481 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.577131] env[69328]: DEBUG oslo_concurrency.lockutils [None req-53e9d6fb-ac70-448d-a705-85e6c8f30036 tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.263s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 728.579673] env[69328]: DEBUG oslo_concurrency.lockutils [None req-35d1b3a3-3800-4ca4-b360-cce541dbcceb tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.936s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 728.579985] env[69328]: DEBUG nova.objects.instance [None req-35d1b3a3-3800-4ca4-b360-cce541dbcceb tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Lazy-loading 'resources' on Instance uuid 6102f8e6-f815-4f5f-921f-990be81fca0d {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 728.585869] env[69328]: DEBUG oslo_vmware.api [None req-6f37e08b-f51f-46fa-b487-d83cc96036f5 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3273064, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.587889] env[69328]: DEBUG oslo_vmware.api [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Waiting for the task: (returnval){ [ 728.587889] env[69328]: value = "task-3273066" [ 728.587889] env[69328]: _type = "Task" [ 728.587889] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.599396] env[69328]: DEBUG oslo_vmware.api [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Task: {'id': task-3273066, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.609472] env[69328]: DEBUG oslo_vmware.api [None req-3dfbb600-41b4-427f-be91-f569b0800126 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3273061, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.611694] env[69328]: INFO nova.scheduler.client.report [None req-53e9d6fb-ac70-448d-a705-85e6c8f30036 tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Deleted allocations for instance c3673531-9167-4d33-b8ce-d6afa5e589bc [ 728.799602] env[69328]: DEBUG oslo_vmware.api [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': task-3273065, 'name': PowerOnVM_Task, 'duration_secs': 0.492224} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.800014] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 728.800313] env[69328]: INFO nova.compute.manager [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Took 8.57 seconds to spawn the instance on the hypervisor. [ 728.800560] env[69328]: DEBUG nova.compute.manager [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 728.801718] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9be95a9d-e65e-48bf-825c-ab9d12347b1f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.846448] env[69328]: DEBUG oslo_vmware.api [None req-d6c0f9fc-d096-40eb-b1e7-8a646001a644 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Task: {'id': task-3273059, 'name': ReconfigVM_Task, 'duration_secs': 1.207618} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.846728] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d6c0f9fc-d096-40eb-b1e7-8a646001a644 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Reconfigured VM instance to set the machine id {{(pid=69328) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 728.977840] env[69328]: DEBUG nova.network.neutron [req-451d3d66-272b-46a1-a567-b847c05024df req-9edef07e-2850-42d7-b8c8-609bfb3f49b1 service nova] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Updated VIF entry in instance network info cache for port a95af8f2-189b-449d-974d-b380402c6a3f. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 728.978243] env[69328]: DEBUG nova.network.neutron [req-451d3d66-272b-46a1-a567-b847c05024df req-9edef07e-2850-42d7-b8c8-609bfb3f49b1 service nova] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Updating instance_info_cache with network_info: [{"id": "a95af8f2-189b-449d-974d-b380402c6a3f", "address": "fa:16:3e:2d:14:69", "network": {"id": "749d8822-e19e-4761-9e9f-f5717a49481a", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-136341088-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.135", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6895a4954cb4bc89dab40eb3f655606", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa95af8f2-18", "ovs_interfaceid": "a95af8f2-189b-449d-974d-b380402c6a3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 729.043284] env[69328]: DEBUG oslo_vmware.api [None req-6f37e08b-f51f-46fa-b487-d83cc96036f5 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3273064, 'name': PowerOffVM_Task, 'duration_secs': 0.719581} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.043284] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f37e08b-f51f-46fa-b487-d83cc96036f5 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 729.043284] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6f37e08b-f51f-46fa-b487-d83cc96036f5 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 729.043284] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8b37f067-76ec-470f-97c0-33634f2e6fd6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.099858] env[69328]: DEBUG oslo_vmware.api [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Task: {'id': task-3273066, 'name': ReconfigVM_Task, 'duration_secs': 0.412409} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.099858] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Reconfigured VM instance instance-00000023 to attach disk [datastore1] e5d3df12-5334-44c8-9a44-1674e57918bb/e5d3df12-5334-44c8-9a44-1674e57918bb.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 729.104224] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3bf87826-2755-4324-a1f4-d070653f3c1a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.113795] env[69328]: DEBUG oslo_vmware.api [None req-3dfbb600-41b4-427f-be91-f569b0800126 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3273061, 'name': PowerOffVM_Task, 'duration_secs': 1.148687} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.118378] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dfbb600-41b4-427f-be91-f569b0800126 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 729.118604] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3dfbb600-41b4-427f-be91-f569b0800126 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 729.119100] env[69328]: DEBUG oslo_vmware.api [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Waiting for the task: (returnval){ [ 729.119100] env[69328]: value = "task-3273068" [ 729.119100] env[69328]: _type = "Task" [ 729.119100] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.120230] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6f37e08b-f51f-46fa-b487-d83cc96036f5 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 729.120230] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6f37e08b-f51f-46fa-b487-d83cc96036f5 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 729.120230] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f37e08b-f51f-46fa-b487-d83cc96036f5 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Deleting the datastore file [datastore2] 146a3eef-0971-4f6e-bd24-58b38a1de0ed {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 729.120559] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7f5ceee3-7acb-4c99-a680-8713166c4b3f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.127017] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c23159f9-9cb1-4dac-84e0-0083c7531121 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.129935] env[69328]: DEBUG oslo_concurrency.lockutils [None req-53e9d6fb-ac70-448d-a705-85e6c8f30036 tempest-ServersNegativeTestMultiTenantJSON-843802200 tempest-ServersNegativeTestMultiTenantJSON-843802200-project-member] Lock "c3673531-9167-4d33-b8ce-d6afa5e589bc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.046s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 729.142498] env[69328]: DEBUG oslo_vmware.api [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Task: {'id': task-3273068, 'name': Rename_Task} progress is 10%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.147040] env[69328]: DEBUG oslo_vmware.api [None req-6f37e08b-f51f-46fa-b487-d83cc96036f5 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for the task: (returnval){ [ 729.147040] env[69328]: value = "task-3273070" [ 729.147040] env[69328]: _type = "Task" [ 729.147040] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.157342] env[69328]: DEBUG oslo_vmware.api [None req-6f37e08b-f51f-46fa-b487-d83cc96036f5 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3273070, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.216522] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3dfbb600-41b4-427f-be91-f569b0800126 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 729.216918] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3dfbb600-41b4-427f-be91-f569b0800126 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 729.217301] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dfbb600-41b4-427f-be91-f569b0800126 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Deleting the datastore file [datastore1] 1e7e9e6e-c084-480c-8653-8441c13d7514 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 729.217654] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e8cc913f-8b7e-498f-a13d-81bc6c7f273f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.229958] env[69328]: DEBUG oslo_vmware.api [None req-3dfbb600-41b4-427f-be91-f569b0800126 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for the task: (returnval){ [ 729.229958] env[69328]: value = "task-3273071" [ 729.229958] env[69328]: _type = "Task" [ 729.229958] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.241652] env[69328]: DEBUG oslo_vmware.api [None req-3dfbb600-41b4-427f-be91-f569b0800126 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3273071, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.325179] env[69328]: INFO nova.compute.manager [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Took 49.26 seconds to build instance. [ 729.342114] env[69328]: DEBUG nova.compute.manager [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 729.379782] env[69328]: DEBUG nova.virt.hardware [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 729.380164] env[69328]: DEBUG nova.virt.hardware [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 729.380377] env[69328]: DEBUG nova.virt.hardware [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 729.380623] env[69328]: DEBUG nova.virt.hardware [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 729.380922] env[69328]: DEBUG nova.virt.hardware [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 729.381256] env[69328]: DEBUG nova.virt.hardware [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 729.381414] env[69328]: DEBUG nova.virt.hardware [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 729.381614] env[69328]: DEBUG nova.virt.hardware [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 729.382290] env[69328]: DEBUG nova.virt.hardware [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 
tempest-ServerShowV257Test-890624642-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 729.382564] env[69328]: DEBUG nova.virt.hardware [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 729.382799] env[69328]: DEBUG nova.virt.hardware [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 729.383789] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fe32ae4-cc7e-4410-9dee-e4282c9bcfd7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.396941] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f02a5d61-08c6-44f3-a183-53a968c34bc2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.415044] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Instance VIF info [] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 729.421703] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Creating folder: Project (b66e1fb8690c4cf894aff56b7fa5b0d8). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 729.425422] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-211f16c9-0ddc-433a-b611-e63e45d8c1cf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.440813] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Created folder: Project (b66e1fb8690c4cf894aff56b7fa5b0d8) in parent group-v653649. [ 729.441153] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Creating folder: Instances. Parent ref: group-v653765. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 729.444173] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-87c32304-d2b1-442a-828e-afd734f65da5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.456636] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Created folder: Instances in parent group-v653765. 
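The recurring `Waiting for the task: ... progress is N% ... completed successfully` triplets in these entries are emitted by oslo.vmware's task poller (`wait_for_task` → `_poll_task`), while the `Lock "..." acquired by ... :: waited Xs` / `"released" ... :: held Ys` pairs come from oslo.concurrency's `synchronized` wrapper. A minimal sketch of the calling pattern that produces both kinds of lines is shown below; the vCenter host, the credentials, and the `claim_and_power_off` helper are illustrative placeholders, not code taken from Nova.

```python
# Minimal sketch, assuming a reachable vCenter and placeholder credentials.
# It illustrates the two library patterns visible in the surrounding log:
# oslo.vmware task polling and oslo.concurrency named locks.
from oslo_concurrency import lockutils
from oslo_vmware import api


def make_session():
    # Constructing VMwareAPISession logs into vCenter (SessionManager.Login)
    # and exposes the invoke_api() / wait_for_task() calls used below.
    return api.VMwareAPISession(
        'vc.example.test',               # placeholder vCenter host
        'administrator@vsphere.local',   # placeholder username
        'secret',                        # placeholder password
        api_retry_count=10,
        task_poll_interval=0.5)


@lockutils.synchronized('compute_resources')
def claim_and_power_off(session, vm_ref):
    # Entering the decorated function acquires the named lock; lockutils logs
    # 'Lock "compute_resources" acquired by ... :: waited Xs' here and the
    # matching '"released" ... :: held Ys' line when the function returns.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    # wait_for_task() polls the returned vCenter task, logging the
    # 'progress is N%' lines until the task succeeds or raises on failure.
    session.wait_for_task(task)
```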
[ 729.456951] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 729.457181] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 729.457415] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-76e3d0b9-ad14-4e8a-bcbe-c55b0211af44 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.481041] env[69328]: DEBUG oslo_concurrency.lockutils [req-451d3d66-272b-46a1-a567-b847c05024df req-9edef07e-2850-42d7-b8c8-609bfb3f49b1 service nova] Releasing lock "refresh_cache-46526210-2783-408d-9ecb-773f33ff0c66" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 729.481204] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 729.481204] env[69328]: value = "task-3273074" [ 729.481204] env[69328]: _type = "Task" [ 729.481204] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.481423] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0ed82f64-0f31-45d2-8136-b76441502164 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Acquired lock "refresh_cache-46526210-2783-408d-9ecb-773f33ff0c66" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 729.496575] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273074, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.638976] env[69328]: DEBUG oslo_vmware.api [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Task: {'id': task-3273068, 'name': Rename_Task, 'duration_secs': 0.215895} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.638976] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 729.639338] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-57c0973e-610c-4649-a8e6-b340f7f376f0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.666481] env[69328]: DEBUG oslo_vmware.api [None req-6f37e08b-f51f-46fa-b487-d83cc96036f5 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3273070, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.186202} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.671869] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f37e08b-f51f-46fa-b487-d83cc96036f5 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 729.676114] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6f37e08b-f51f-46fa-b487-d83cc96036f5 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 729.676114] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6f37e08b-f51f-46fa-b487-d83cc96036f5 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 729.676114] env[69328]: INFO nova.compute.manager [None req-6f37e08b-f51f-46fa-b487-d83cc96036f5 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Took 1.67 seconds to destroy the instance on the hypervisor. [ 729.676114] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6f37e08b-f51f-46fa-b487-d83cc96036f5 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 729.676429] env[69328]: DEBUG oslo_vmware.api [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Waiting for the task: (returnval){ [ 729.676429] env[69328]: value = "task-3273075" [ 729.676429] env[69328]: _type = "Task" [ 729.676429] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.677138] env[69328]: DEBUG nova.compute.manager [-] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 729.677138] env[69328]: DEBUG nova.network.neutron [-] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 729.689479] env[69328]: DEBUG oslo_vmware.api [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Task: {'id': task-3273075, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.742328] env[69328]: DEBUG oslo_vmware.api [None req-3dfbb600-41b4-427f-be91-f569b0800126 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Task: {'id': task-3273071, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152062} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.742328] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dfbb600-41b4-427f-be91-f569b0800126 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 729.742328] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3dfbb600-41b4-427f-be91-f569b0800126 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 729.742328] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3dfbb600-41b4-427f-be91-f569b0800126 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 729.742328] env[69328]: INFO nova.compute.manager [None req-3dfbb600-41b4-427f-be91-f569b0800126 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Took 2.16 seconds to destroy the instance on the hypervisor. [ 729.742593] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3dfbb600-41b4-427f-be91-f569b0800126 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 729.742708] env[69328]: DEBUG nova.compute.manager [-] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 729.742797] env[69328]: DEBUG nova.network.neutron [-] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 729.795629] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31c123f8-d6de-42f7-b2ec-2e53df2fee98 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.804286] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fe41a8a-467b-4489-b400-e82c2388bcec {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.841957] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8b5d5b25-8692-4d15-b0de-7470fd5d7008 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Lock "4c54c0dd-32f1-4d35-b770-3e1a540c54a7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.056s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 729.847512] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbc09ff3-d7cc-4372-9af3-674f44ac55e2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.855415] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80409bf4-a251-4380-87af-8fb3e37485c7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.873955] env[69328]: DEBUG nova.compute.provider_tree [None req-35d1b3a3-3800-4ca4-b360-cce541dbcceb tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 730.002121] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273074, 'name': CreateVM_Task, 'duration_secs': 0.456775} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.002121] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 730.002121] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.002121] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 730.002774] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 730.003370] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d136f3b1-6b11-45b4-aaf7-e16e65def8ca {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.012033] env[69328]: DEBUG oslo_vmware.api [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Waiting for the task: (returnval){ [ 730.012033] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]527dc788-f8d4-670f-9cc7-2209b9b8f8cd" [ 730.012033] env[69328]: _type = "Task" [ 730.012033] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.026274] env[69328]: DEBUG oslo_vmware.api [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]527dc788-f8d4-670f-9cc7-2209b9b8f8cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.192239] env[69328]: DEBUG oslo_vmware.api [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Task: {'id': task-3273075, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.333725] env[69328]: DEBUG oslo_vmware.rw_handles [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52679d73-e965-e6eb-803b-6ff1bcc7fe65/disk-0.vmdk. 
{{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 730.335170] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65aba25e-8db9-4189-8692-5b4194a9028b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.343162] env[69328]: DEBUG oslo_vmware.rw_handles [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52679d73-e965-e6eb-803b-6ff1bcc7fe65/disk-0.vmdk is in state: ready. {{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 730.343353] env[69328]: ERROR oslo_vmware.rw_handles [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52679d73-e965-e6eb-803b-6ff1bcc7fe65/disk-0.vmdk due to incomplete transfer. [ 730.343598] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-78a7f65d-7703-4c5b-aeba-a551cb604808 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.347893] env[69328]: DEBUG nova.compute.manager [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 730.356477] env[69328]: DEBUG oslo_vmware.rw_handles [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52679d73-e965-e6eb-803b-6ff1bcc7fe65/disk-0.vmdk. 
{{(pid=69328) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 730.356706] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Uploaded image e1e4d75f-9329-4173-b193-da66aa2bc3ab to the Glance image server {{(pid=69328) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 730.359106] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Destroying the VM {{(pid=69328) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 730.359741] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-c49b4c7c-8fe6-4303-a318-ca566391c556 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.372094] env[69328]: DEBUG oslo_vmware.api [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Waiting for the task: (returnval){ [ 730.372094] env[69328]: value = "task-3273076" [ 730.372094] env[69328]: _type = "Task" [ 730.372094] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.378191] env[69328]: DEBUG nova.scheduler.client.report [None req-35d1b3a3-3800-4ca4-b360-cce541dbcceb tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 730.385341] env[69328]: DEBUG oslo_vmware.api [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3273076, 'name': Destroy_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.463985] env[69328]: DEBUG nova.compute.manager [req-30debf73-5a7e-43bf-9749-923b98f3e0e5 req-3485ed5e-9850-40a4-be3f-d0dea329129f service nova] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Received event network-vif-deleted-60d09662-fefa-479c-b18f-6c4109ede4e4 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 730.464164] env[69328]: INFO nova.compute.manager [req-30debf73-5a7e-43bf-9749-923b98f3e0e5 req-3485ed5e-9850-40a4-be3f-d0dea329129f service nova] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Neutron deleted interface 60d09662-fefa-479c-b18f-6c4109ede4e4; detaching it from the instance and deleting it from the info cache [ 730.464335] env[69328]: DEBUG nova.network.neutron [req-30debf73-5a7e-43bf-9749-923b98f3e0e5 req-3485ed5e-9850-40a4-be3f-d0dea329129f service nova] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.495484] env[69328]: DEBUG oslo_concurrency.lockutils [None req-76f2b362-e0df-4234-aef2-d298bd07cb45 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Acquiring lock "4c54c0dd-32f1-4d35-b770-3e1a540c54a7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 730.495750] env[69328]: DEBUG oslo_concurrency.lockutils [None req-76f2b362-e0df-4234-aef2-d298bd07cb45 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Lock "4c54c0dd-32f1-4d35-b770-3e1a540c54a7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 730.495986] env[69328]: DEBUG oslo_concurrency.lockutils [None req-76f2b362-e0df-4234-aef2-d298bd07cb45 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Acquiring lock "4c54c0dd-32f1-4d35-b770-3e1a540c54a7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 730.496190] env[69328]: DEBUG oslo_concurrency.lockutils [None req-76f2b362-e0df-4234-aef2-d298bd07cb45 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Lock "4c54c0dd-32f1-4d35-b770-3e1a540c54a7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 730.496355] env[69328]: DEBUG oslo_concurrency.lockutils [None req-76f2b362-e0df-4234-aef2-d298bd07cb45 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Lock "4c54c0dd-32f1-4d35-b770-3e1a540c54a7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 730.498589] env[69328]: INFO nova.compute.manager [None 
req-76f2b362-e0df-4234-aef2-d298bd07cb45 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Terminating instance [ 730.530737] env[69328]: DEBUG oslo_vmware.api [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]527dc788-f8d4-670f-9cc7-2209b9b8f8cd, 'name': SearchDatastore_Task, 'duration_secs': 0.017423} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.532826] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 730.533153] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 730.533393] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.533532] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 730.533703] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 730.534455] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f52f267b-3af6-4cd4-a304-70cd9612499a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.545213] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 730.545415] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 730.547028] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d181a676-b3f3-4b59-80e1-c1ae2cf57498 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.554017] env[69328]: DEBUG oslo_vmware.api [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Waiting for the task: (returnval){ [ 730.554017] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]520a4bb8-cc9f-f920-ff01-766c57f17430" [ 730.554017] env[69328]: _type = "Task" [ 730.554017] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.563613] env[69328]: DEBUG oslo_vmware.api [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]520a4bb8-cc9f-f920-ff01-766c57f17430, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.575104] env[69328]: DEBUG nova.network.neutron [-] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.696187] env[69328]: DEBUG oslo_vmware.api [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Task: {'id': task-3273075, 'name': PowerOnVM_Task, 'duration_secs': 0.620938} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.696187] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 730.696616] env[69328]: INFO nova.compute.manager [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Took 7.77 seconds to spawn the instance on the hypervisor. 
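[editor's note] The PowerOnVM_Task entries above (progress 33%, then 89%, then "completed successfully", "Powered on the VM", and the 7.77 s spawn time) show oslo.vmware's task-polling loop: the driver submits a vCenter task and then blocks in wait_for_task, which re-reads the task's progress on a fixed interval (the repeated "_poll_task ... progress is N%" lines) until the task succeeds or raises the translated fault. A minimal sketch of that pattern, with placeholder connection details and a VM managed-object reference obtained elsewhere; this is not the exact helper chain Nova's vmwareapi driver wraps around it:

    from oslo_vmware import api as vmware_api

    # Placeholder vCenter host and credentials, not the real ones in this log.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.test', 'user', 'secret',
        api_retry_count=10,
        task_poll_interval=0.5)   # the interval behind the repeated "progress is N%" lines

    vm_ref = ...  # a VirtualMachine managed-object reference, looked up elsewhere (placeholder)

    # invoke_api submits the vCenter task; wait_for_task polls it, logging progress,
    # until it reaches the success state or raises the translated fault on error.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)

The SearchDatastore_Task, CreateVM_Task and DeleteDatastoreFile_Task waits earlier in this excerpt go through the same wait_for_task/_poll_task path.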
[ 730.696808] env[69328]: DEBUG nova.compute.manager [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 730.697630] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35bd6a71-6671-4fbe-8416-1e646c933325 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.870170] env[69328]: DEBUG nova.network.neutron [None req-0ed82f64-0f31-45d2-8136-b76441502164 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 730.888866] env[69328]: DEBUG oslo_concurrency.lockutils [None req-35d1b3a3-3800-4ca4-b360-cce541dbcceb tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.309s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 730.891169] env[69328]: DEBUG oslo_vmware.api [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3273076, 'name': Destroy_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.895088] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.841s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 730.896772] env[69328]: INFO nova.compute.claims [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 730.900396] env[69328]: DEBUG nova.compute.manager [req-7a248bab-3861-49c8-b1a4-89918f8395a9 req-d47b96e1-c059-41b0-8bdf-aee67adc677c service nova] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Received event network-vif-deleted-dd70e166-7f6d-4b58-b33d-e1c74a5da1f1 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 730.900574] env[69328]: INFO nova.compute.manager [req-7a248bab-3861-49c8-b1a4-89918f8395a9 req-d47b96e1-c059-41b0-8bdf-aee67adc677c service nova] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Neutron deleted interface dd70e166-7f6d-4b58-b33d-e1c74a5da1f1; detaching it from the instance and deleting it from the info cache [ 730.900737] env[69328]: DEBUG nova.network.neutron [req-7a248bab-3861-49c8-b1a4-89918f8395a9 req-d47b96e1-c059-41b0-8bdf-aee67adc677c service nova] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 730.904078] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 730.916412] env[69328]: INFO nova.scheduler.client.report [None req-35d1b3a3-3800-4ca4-b360-cce541dbcceb tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Deleted allocations for instance 6102f8e6-f815-4f5f-921f-990be81fca0d [ 730.969138] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fdc8b2dc-359b-435b-b97b-4abb507dd1c8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.984225] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55eb7ea9-438b-48f0-9a35-cebd3a503f26 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.005104] env[69328]: DEBUG nova.compute.manager [None req-76f2b362-e0df-4234-aef2-d298bd07cb45 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 731.005543] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-76f2b362-e0df-4234-aef2-d298bd07cb45 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 731.006428] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41942d5b-713a-4c25-91dc-db10cfc25bd4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.032110] env[69328]: DEBUG nova.compute.manager [req-30debf73-5a7e-43bf-9749-923b98f3e0e5 req-3485ed5e-9850-40a4-be3f-d0dea329129f service nova] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Detach interface failed, port_id=60d09662-fefa-479c-b18f-6c4109ede4e4, reason: Instance 1e7e9e6e-c084-480c-8653-8441c13d7514 could not be found. 
{{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 731.039325] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-76f2b362-e0df-4234-aef2-d298bd07cb45 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 731.039325] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8b3aee99-6f2f-414b-bd91-ada2c7a3ba57 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.047800] env[69328]: DEBUG oslo_vmware.api [None req-76f2b362-e0df-4234-aef2-d298bd07cb45 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Waiting for the task: (returnval){ [ 731.047800] env[69328]: value = "task-3273077" [ 731.047800] env[69328]: _type = "Task" [ 731.047800] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.064809] env[69328]: DEBUG oslo_vmware.api [None req-76f2b362-e0df-4234-aef2-d298bd07cb45 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': task-3273077, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.072427] env[69328]: DEBUG oslo_vmware.api [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]520a4bb8-cc9f-f920-ff01-766c57f17430, 'name': SearchDatastore_Task, 'duration_secs': 0.01591} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.076408] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24355598-23b4-4dbc-b70c-09c0fe557da2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.077053] env[69328]: INFO nova.compute.manager [-] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Took 1.33 seconds to deallocate network for instance. [ 731.080601] env[69328]: DEBUG oslo_vmware.api [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Waiting for the task: (returnval){ [ 731.080601] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52fb530a-cd8a-a212-8cf4-b316797da718" [ 731.080601] env[69328]: _type = "Task" [ 731.080601] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.095035] env[69328]: DEBUG oslo_vmware.api [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52fb530a-cd8a-a212-8cf4-b316797da718, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.225763] env[69328]: INFO nova.compute.manager [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Took 49.84 seconds to build instance. [ 731.232230] env[69328]: DEBUG nova.network.neutron [-] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.389981] env[69328]: DEBUG oslo_vmware.api [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3273076, 'name': Destroy_Task} progress is 33%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.405229] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e87a6995-2f95-4826-bfc8-efa1adddb5e0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.419372] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e511be1-0b8d-4f25-b02d-c53d6deaa929 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.435724] env[69328]: DEBUG oslo_concurrency.lockutils [None req-35d1b3a3-3800-4ca4-b360-cce541dbcceb tempest-ImagesOneServerTestJSON-560227128 tempest-ImagesOneServerTestJSON-560227128-project-member] Lock "6102f8e6-f815-4f5f-921f-990be81fca0d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.382s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 731.459660] env[69328]: DEBUG nova.compute.manager [req-7a248bab-3861-49c8-b1a4-89918f8395a9 req-d47b96e1-c059-41b0-8bdf-aee67adc677c service nova] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Detach interface failed, port_id=dd70e166-7f6d-4b58-b33d-e1c74a5da1f1, reason: Instance 146a3eef-0971-4f6e-bd24-58b38a1de0ed could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 731.561210] env[69328]: DEBUG oslo_vmware.api [None req-76f2b362-e0df-4234-aef2-d298bd07cb45 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': task-3273077, 'name': PowerOffVM_Task, 'duration_secs': 0.212231} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.561210] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-76f2b362-e0df-4234-aef2-d298bd07cb45 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 731.561210] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-76f2b362-e0df-4234-aef2-d298bd07cb45 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 731.561210] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b063653a-0b6b-401f-94ac-3fc37633a51b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.590051] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3dfbb600-41b4-427f-be91-f569b0800126 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 731.595073] env[69328]: DEBUG oslo_vmware.api [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52fb530a-cd8a-a212-8cf4-b316797da718, 'name': SearchDatastore_Task, 'duration_secs': 0.019682} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.595383] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 731.595773] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 732342ea-2f73-40ea-a826-883ddc7a385a/732342ea-2f73-40ea-a826-883ddc7a385a.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 731.596138] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f92c6b7b-7df0-47e8-9aff-161cac281f2a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.603266] env[69328]: DEBUG oslo_vmware.api [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Waiting for the task: (returnval){ [ 731.603266] env[69328]: value = "task-3273079" [ 731.603266] env[69328]: _type = "Task" [ 731.603266] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.614328] env[69328]: DEBUG oslo_vmware.api [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': task-3273079, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.630720] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-76f2b362-e0df-4234-aef2-d298bd07cb45 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 731.630720] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-76f2b362-e0df-4234-aef2-d298bd07cb45 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 731.630941] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-76f2b362-e0df-4234-aef2-d298bd07cb45 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Deleting the datastore file [datastore1] 4c54c0dd-32f1-4d35-b770-3e1a540c54a7 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 731.633534] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-db82d816-1dbc-402b-84e3-fa3db2266e1b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.640237] env[69328]: DEBUG oslo_vmware.api [None req-76f2b362-e0df-4234-aef2-d298bd07cb45 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Waiting for the task: (returnval){ [ 731.640237] env[69328]: value = "task-3273080" [ 731.640237] env[69328]: _type = "Task" [ 731.640237] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.661934] env[69328]: DEBUG oslo_vmware.api [None req-76f2b362-e0df-4234-aef2-d298bd07cb45 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': task-3273080, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.728430] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c08f149f-23fd-4506-bc9d-fb28c321fa9b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Lock "e5d3df12-5334-44c8-9a44-1674e57918bb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.220s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 731.739047] env[69328]: INFO nova.compute.manager [-] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Took 2.06 seconds to deallocate network for instance. 
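[editor's note] The teardown entries above follow a consistent order for each instance: power off (PowerOffVM_Task), unregister the VM, delete its datastore directory (DeleteDatastoreFile_Task), and then deallocate the Neutron ports, with the deallocation wrapped in a looping call ("Waiting for function ... _deallocate_network_with_retries to return") so a failed attempt can be retried on a later iteration. A rough sketch of that retry-in-a-looping-call shape, using oslo_service's FixedIntervalLoopingCall and a stand-in deallocate step; Nova's own helper and its retry/backoff policy may differ:

    from oslo_service import loopingcall

    def deallocate_network():
        """Stand-in for the real Neutron teardown (deallocate_for_instance above)."""

    def _try_deallocate():
        try:
            deallocate_network()
        except Exception:
            return                              # failed: run again on the next interval
        raise loopingcall.LoopingCallDone()     # succeeded: stop the looping call

    timer = loopingcall.FixedIntervalLoopingCall(_try_deallocate)
    timer.start(interval=1.0).wait()            # block until LoopingCallDone is raised

The same "Waiting for function ..." wrapper appears around the network deallocation of all three instances deleted in this excerpt (146a3eef-..., 1e7e9e6e-..., 4c54c0dd-...).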
[ 731.891194] env[69328]: DEBUG oslo_vmware.api [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3273076, 'name': Destroy_Task, 'duration_secs': 1.134321} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.892165] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Destroyed the VM [ 731.892486] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Deleting Snapshot of the VM instance {{(pid=69328) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 731.892865] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-bd4bb3a6-b406-4ec5-8d6a-7b5bef3b97bb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.905013] env[69328]: DEBUG oslo_vmware.api [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Waiting for the task: (returnval){ [ 731.905013] env[69328]: value = "task-3273081" [ 731.905013] env[69328]: _type = "Task" [ 731.905013] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.918958] env[69328]: DEBUG oslo_vmware.api [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3273081, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.927173] env[69328]: DEBUG nova.network.neutron [None req-0ed82f64-0f31-45d2-8136-b76441502164 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Updating instance_info_cache with network_info: [{"id": "a95af8f2-189b-449d-974d-b380402c6a3f", "address": "fa:16:3e:2d:14:69", "network": {"id": "749d8822-e19e-4761-9e9f-f5717a49481a", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-136341088-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.135", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6895a4954cb4bc89dab40eb3f655606", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa95af8f2-18", "ovs_interfaceid": "a95af8f2-189b-449d-974d-b380402c6a3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.119846] env[69328]: DEBUG oslo_vmware.api [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': task-3273079, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.491578} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.120142] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 732342ea-2f73-40ea-a826-883ddc7a385a/732342ea-2f73-40ea-a826-883ddc7a385a.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 732.120354] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 732.120623] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-83f9a801-5d5d-456b-908e-8d63b68104a2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.128496] env[69328]: DEBUG oslo_vmware.api [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Waiting for the task: (returnval){ [ 732.128496] env[69328]: value = "task-3273082" [ 732.128496] env[69328]: _type = "Task" [ 732.128496] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.144034] env[69328]: DEBUG oslo_vmware.api [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': task-3273082, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.154643] env[69328]: DEBUG oslo_vmware.api [None req-76f2b362-e0df-4234-aef2-d298bd07cb45 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Task: {'id': task-3273080, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174482} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.155207] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-76f2b362-e0df-4234-aef2-d298bd07cb45 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 732.155532] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-76f2b362-e0df-4234-aef2-d298bd07cb45 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 732.155839] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-76f2b362-e0df-4234-aef2-d298bd07cb45 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 732.158024] env[69328]: INFO nova.compute.manager [None req-76f2b362-e0df-4234-aef2-d298bd07cb45 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Took 1.15 seconds to destroy the instance on the hypervisor. [ 732.158024] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-76f2b362-e0df-4234-aef2-d298bd07cb45 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 732.159427] env[69328]: DEBUG nova.compute.manager [-] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 732.159644] env[69328]: DEBUG nova.network.neutron [-] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 732.235834] env[69328]: DEBUG nova.compute.manager [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 732.248961] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6f37e08b-f51f-46fa-b487-d83cc96036f5 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 732.420518] env[69328]: DEBUG oslo_vmware.api [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3273081, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.430229] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0ed82f64-0f31-45d2-8136-b76441502164 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Releasing lock "refresh_cache-46526210-2783-408d-9ecb-773f33ff0c66" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 732.431993] env[69328]: DEBUG nova.compute.manager [None req-0ed82f64-0f31-45d2-8136-b76441502164 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Inject network info {{(pid=69328) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 732.432865] env[69328]: DEBUG nova.compute.manager [None req-0ed82f64-0f31-45d2-8136-b76441502164 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] network_info to inject: |[{"id": "a95af8f2-189b-449d-974d-b380402c6a3f", "address": "fa:16:3e:2d:14:69", "network": {"id": "749d8822-e19e-4761-9e9f-f5717a49481a", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-136341088-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.135", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6895a4954cb4bc89dab40eb3f655606", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa95af8f2-18", "ovs_interfaceid": "a95af8f2-189b-449d-974d-b380402c6a3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 732.443620] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0ed82f64-0f31-45d2-8136-b76441502164 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Reconfiguring VM instance to set the machine id {{(pid=69328) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 732.447814] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-98beec10-3f45-47bd-9481-488cc683256c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.466261] env[69328]: DEBUG oslo_vmware.api [None req-0ed82f64-0f31-45d2-8136-b76441502164 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Waiting for the task: (returnval){ [ 732.466261] env[69328]: value = "task-3273083" [ 732.466261] env[69328]: _type = "Task" [ 732.466261] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.479088] env[69328]: DEBUG oslo_vmware.api [None req-0ed82f64-0f31-45d2-8136-b76441502164 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Task: {'id': task-3273083, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.545845] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Acquiring lock "07b1f872-02bc-471f-97d6-3a781075bee5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 732.546154] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Lock "07b1f872-02bc-471f-97d6-3a781075bee5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 732.592799] env[69328]: DEBUG oslo_concurrency.lockutils [None req-af1ea56b-1ba2-47fb-9021-e84f5b1f1c5b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Acquiring lock "e5d3df12-5334-44c8-9a44-1674e57918bb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 732.592950] env[69328]: DEBUG oslo_concurrency.lockutils [None req-af1ea56b-1ba2-47fb-9021-e84f5b1f1c5b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Lock "e5d3df12-5334-44c8-9a44-1674e57918bb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 732.593947] env[69328]: DEBUG oslo_concurrency.lockutils [None req-af1ea56b-1ba2-47fb-9021-e84f5b1f1c5b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Acquiring lock "e5d3df12-5334-44c8-9a44-1674e57918bb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 732.593947] env[69328]: DEBUG oslo_concurrency.lockutils [None req-af1ea56b-1ba2-47fb-9021-e84f5b1f1c5b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Lock "e5d3df12-5334-44c8-9a44-1674e57918bb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 732.593947] env[69328]: DEBUG oslo_concurrency.lockutils [None req-af1ea56b-1ba2-47fb-9021-e84f5b1f1c5b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Lock "e5d3df12-5334-44c8-9a44-1674e57918bb-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 732.595638] env[69328]: INFO nova.compute.manager [None req-af1ea56b-1ba2-47fb-9021-e84f5b1f1c5b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Terminating instance [ 732.641343] env[69328]: DEBUG oslo_vmware.api [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': task-3273082, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069292} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.641621] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 732.642904] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-149f12ad-807e-45ba-84c4-3999d7c19e06 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.646997] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fef54f43-7722-4310-b483-52b88a7a64b0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.662694] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-639e320d-1d0b-46c5-868a-dd5d3c95e0fb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.684204] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Reconfiguring VM instance instance-00000024 to attach disk [datastore2] 732342ea-2f73-40ea-a826-883ddc7a385a/732342ea-2f73-40ea-a826-883ddc7a385a.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 732.685288] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-29627c4a-240f-41d3-a8ba-5b43adb0c211 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.730340] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56552922-44a8-42e9-931a-2672766eeb05 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.736394] env[69328]: DEBUG oslo_vmware.api [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Waiting for the task: (returnval){ [ 732.736394] env[69328]: value = "task-3273084" [ 732.736394] env[69328]: _type = "Task" [ 732.736394] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.742624] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-885b6d5b-ebc6-4e4f-8cd8-c0cded160be4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.771516] env[69328]: DEBUG nova.compute.provider_tree [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 732.772788] env[69328]: DEBUG oslo_vmware.api [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': task-3273084, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.774425] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 732.916505] env[69328]: DEBUG oslo_vmware.api [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3273081, 'name': RemoveSnapshot_Task, 'duration_secs': 0.914688} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.916505] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Deleted Snapshot of the VM instance {{(pid=69328) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 732.916633] env[69328]: INFO nova.compute.manager [None req-f7e862ec-1d8a-45e3-bd24-c67aaf7f2c3e tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Took 15.06 seconds to snapshot the instance on the hypervisor. [ 732.980885] env[69328]: DEBUG oslo_vmware.api [None req-0ed82f64-0f31-45d2-8136-b76441502164 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Task: {'id': task-3273083, 'name': ReconfigVM_Task, 'duration_secs': 0.240043} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.981498] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0ed82f64-0f31-45d2-8136-b76441502164 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Reconfigured VM instance to set the machine id {{(pid=69328) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 733.035236] env[69328]: DEBUG nova.compute.manager [req-db5ade1a-6b4d-4396-94ac-6bc9c43b1fad req-048e45c4-0be7-4009-b9df-0228ff45dfec service nova] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Received event network-changed-a95af8f2-189b-449d-974d-b380402c6a3f {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 733.035428] env[69328]: DEBUG nova.compute.manager [req-db5ade1a-6b4d-4396-94ac-6bc9c43b1fad req-048e45c4-0be7-4009-b9df-0228ff45dfec service nova] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Refreshing instance network info cache due to event network-changed-a95af8f2-189b-449d-974d-b380402c6a3f. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 733.035664] env[69328]: DEBUG oslo_concurrency.lockutils [req-db5ade1a-6b4d-4396-94ac-6bc9c43b1fad req-048e45c4-0be7-4009-b9df-0228ff45dfec service nova] Acquiring lock "refresh_cache-46526210-2783-408d-9ecb-773f33ff0c66" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.035833] env[69328]: DEBUG oslo_concurrency.lockutils [req-db5ade1a-6b4d-4396-94ac-6bc9c43b1fad req-048e45c4-0be7-4009-b9df-0228ff45dfec service nova] Acquired lock "refresh_cache-46526210-2783-408d-9ecb-773f33ff0c66" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 733.036220] env[69328]: DEBUG nova.network.neutron [req-db5ade1a-6b4d-4396-94ac-6bc9c43b1fad req-048e45c4-0be7-4009-b9df-0228ff45dfec service nova] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Refreshing network info cache for port a95af8f2-189b-449d-974d-b380402c6a3f {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 733.102085] env[69328]: DEBUG nova.compute.manager [None req-af1ea56b-1ba2-47fb-9021-e84f5b1f1c5b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 733.102349] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-af1ea56b-1ba2-47fb-9021-e84f5b1f1c5b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 733.103487] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebb45c69-7aa7-4b13-9cc2-152d3184da96 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.114471] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-af1ea56b-1ba2-47fb-9021-e84f5b1f1c5b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 733.114792] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8397a0b6-42fd-40ba-ad69-65fabe51c726 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.122457] env[69328]: DEBUG oslo_vmware.api [None req-af1ea56b-1ba2-47fb-9021-e84f5b1f1c5b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Waiting for the task: (returnval){ [ 733.122457] env[69328]: value = "task-3273085" [ 733.122457] env[69328]: _type = "Task" [ 733.122457] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.133202] env[69328]: DEBUG oslo_vmware.api [None req-af1ea56b-1ba2-47fb-9021-e84f5b1f1c5b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Task: {'id': task-3273085, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.252022] env[69328]: DEBUG oslo_vmware.api [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': task-3273084, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.276885] env[69328]: DEBUG nova.scheduler.client.report [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 733.280265] env[69328]: DEBUG nova.network.neutron [-] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 733.446337] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f9e274ee-1c49-4fd0-91b5-055e0e13c669 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Acquiring lock "46526210-2783-408d-9ecb-773f33ff0c66" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 733.446337] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f9e274ee-1c49-4fd0-91b5-055e0e13c669 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Lock "46526210-2783-408d-9ecb-773f33ff0c66" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 733.446337] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f9e274ee-1c49-4fd0-91b5-055e0e13c669 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Acquiring lock "46526210-2783-408d-9ecb-773f33ff0c66-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 733.446337] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f9e274ee-1c49-4fd0-91b5-055e0e13c669 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Lock "46526210-2783-408d-9ecb-773f33ff0c66-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 733.446742] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f9e274ee-1c49-4fd0-91b5-055e0e13c669 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Lock "46526210-2783-408d-9ecb-773f33ff0c66-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 733.448097] env[69328]: INFO nova.compute.manager [None 
req-f9e274ee-1c49-4fd0-91b5-055e0e13c669 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Terminating instance [ 733.636991] env[69328]: DEBUG oslo_vmware.api [None req-af1ea56b-1ba2-47fb-9021-e84f5b1f1c5b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Task: {'id': task-3273085, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.755773] env[69328]: DEBUG oslo_vmware.api [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': task-3273084, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.782569] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.888s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 733.783184] env[69328]: DEBUG nova.compute.manager [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 733.785743] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.537s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 733.787110] env[69328]: INFO nova.compute.claims [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 733.790296] env[69328]: INFO nova.compute.manager [-] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Took 1.63 seconds to deallocate network for instance. [ 733.874624] env[69328]: DEBUG nova.network.neutron [req-db5ade1a-6b4d-4396-94ac-6bc9c43b1fad req-048e45c4-0be7-4009-b9df-0228ff45dfec service nova] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Updated VIF entry in instance network info cache for port a95af8f2-189b-449d-974d-b380402c6a3f. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 733.874624] env[69328]: DEBUG nova.network.neutron [req-db5ade1a-6b4d-4396-94ac-6bc9c43b1fad req-048e45c4-0be7-4009-b9df-0228ff45dfec service nova] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Updating instance_info_cache with network_info: [{"id": "a95af8f2-189b-449d-974d-b380402c6a3f", "address": "fa:16:3e:2d:14:69", "network": {"id": "749d8822-e19e-4761-9e9f-f5717a49481a", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-136341088-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.135", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6895a4954cb4bc89dab40eb3f655606", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa95af8f2-18", "ovs_interfaceid": "a95af8f2-189b-449d-974d-b380402c6a3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 733.955758] env[69328]: DEBUG nova.compute.manager [None req-f9e274ee-1c49-4fd0-91b5-055e0e13c669 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 733.956086] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f9e274ee-1c49-4fd0-91b5-055e0e13c669 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 733.957386] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bb772ee-d760-478e-9246-7ef9fffd7cb7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.967616] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9e274ee-1c49-4fd0-91b5-055e0e13c669 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 733.967765] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b2527719-200e-4dc0-adc1-67e7fcca116f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.977016] env[69328]: DEBUG oslo_vmware.api [None req-f9e274ee-1c49-4fd0-91b5-055e0e13c669 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Waiting for the task: (returnval){ [ 733.977016] env[69328]: value = "task-3273086" [ 733.977016] env[69328]: _type = "Task" [ 733.977016] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.985414] env[69328]: DEBUG oslo_vmware.api [None req-f9e274ee-1c49-4fd0-91b5-055e0e13c669 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Task: {'id': task-3273086, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.134495] env[69328]: DEBUG oslo_vmware.api [None req-af1ea56b-1ba2-47fb-9021-e84f5b1f1c5b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Task: {'id': task-3273085, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.257724] env[69328]: DEBUG oslo_vmware.api [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': task-3273084, 'name': ReconfigVM_Task, 'duration_secs': 1.363678} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.257724] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Reconfigured VM instance instance-00000024 to attach disk [datastore2] 732342ea-2f73-40ea-a826-883ddc7a385a/732342ea-2f73-40ea-a826-883ddc7a385a.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 734.258243] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-784a5286-3b09-4ca2-8c5a-8899f78e90a3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.266744] env[69328]: DEBUG oslo_vmware.api [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Waiting for the task: (returnval){ [ 734.266744] env[69328]: value = "task-3273087" [ 734.266744] env[69328]: _type = "Task" [ 734.266744] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.282034] env[69328]: DEBUG oslo_vmware.api [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': task-3273087, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.292672] env[69328]: DEBUG nova.compute.utils [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 734.296323] env[69328]: DEBUG nova.compute.manager [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 734.296491] env[69328]: DEBUG nova.network.neutron [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 734.299424] env[69328]: DEBUG oslo_concurrency.lockutils [None req-76f2b362-e0df-4234-aef2-d298bd07cb45 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 734.363631] env[69328]: DEBUG nova.policy [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ca4aa5826fac47c3bdbc3e8a422f1177', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '65edf1e9f4344038878d05021bbdef78', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 734.380708] env[69328]: DEBUG oslo_concurrency.lockutils [req-db5ade1a-6b4d-4396-94ac-6bc9c43b1fad req-048e45c4-0be7-4009-b9df-0228ff45dfec service nova] Releasing lock "refresh_cache-46526210-2783-408d-9ecb-773f33ff0c66" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 734.381057] env[69328]: DEBUG nova.compute.manager [req-db5ade1a-6b4d-4396-94ac-6bc9c43b1fad req-048e45c4-0be7-4009-b9df-0228ff45dfec service nova] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Received event network-vif-deleted-a1ba4448-3cab-4866-81f4-785bd26580b2 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 734.381291] env[69328]: INFO nova.compute.manager [req-db5ade1a-6b4d-4396-94ac-6bc9c43b1fad req-048e45c4-0be7-4009-b9df-0228ff45dfec service nova] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Neutron deleted interface a1ba4448-3cab-4866-81f4-785bd26580b2; detaching it from the instance and deleting it from the info cache [ 734.381524] env[69328]: DEBUG nova.network.neutron [req-db5ade1a-6b4d-4396-94ac-6bc9c43b1fad req-048e45c4-0be7-4009-b9df-0228ff45dfec service nova] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.486284] env[69328]: DEBUG oslo_vmware.api [None req-f9e274ee-1c49-4fd0-91b5-055e0e13c669 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Task: {'id': task-3273086, 'name': PowerOffVM_Task, 'duration_secs': 0.279902} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.486608] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9e274ee-1c49-4fd0-91b5-055e0e13c669 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 734.486704] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f9e274ee-1c49-4fd0-91b5-055e0e13c669 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 734.486966] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-86899e73-6c54-4f95-b60a-a447330bda75 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.556207] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f9e274ee-1c49-4fd0-91b5-055e0e13c669 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 734.559024] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f9e274ee-1c49-4fd0-91b5-055e0e13c669 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 734.559024] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9e274ee-1c49-4fd0-91b5-055e0e13c669 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Deleting the datastore file [datastore1] 46526210-2783-408d-9ecb-773f33ff0c66 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 734.559024] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-021f7beb-a109-4dd4-b530-04f1cf3cca53 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.564926] env[69328]: DEBUG oslo_vmware.api [None req-f9e274ee-1c49-4fd0-91b5-055e0e13c669 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Waiting for the task: (returnval){ [ 734.564926] env[69328]: value = "task-3273089" [ 734.564926] env[69328]: _type = "Task" [ 734.564926] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.573522] env[69328]: DEBUG oslo_vmware.api [None req-f9e274ee-1c49-4fd0-91b5-055e0e13c669 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Task: {'id': task-3273089, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.635502] env[69328]: DEBUG oslo_vmware.api [None req-af1ea56b-1ba2-47fb-9021-e84f5b1f1c5b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Task: {'id': task-3273085, 'name': PowerOffVM_Task, 'duration_secs': 1.132678} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.635889] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-af1ea56b-1ba2-47fb-9021-e84f5b1f1c5b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 734.636184] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-af1ea56b-1ba2-47fb-9021-e84f5b1f1c5b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 734.636589] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0320c58f-a6f6-40c7-949d-c0aa1346b195 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.710141] env[69328]: DEBUG nova.network.neutron [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Successfully created port: 1b80d882-8edf-4d06-a91a-6568cce981e8 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 734.723873] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-af1ea56b-1ba2-47fb-9021-e84f5b1f1c5b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 734.723873] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-af1ea56b-1ba2-47fb-9021-e84f5b1f1c5b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 734.723873] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-af1ea56b-1ba2-47fb-9021-e84f5b1f1c5b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Deleting the datastore file [datastore1] e5d3df12-5334-44c8-9a44-1674e57918bb {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 734.724139] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-38886969-e14c-4d1f-a2c8-4669822d4d9f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.731602] env[69328]: DEBUG oslo_vmware.api [None req-af1ea56b-1ba2-47fb-9021-e84f5b1f1c5b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Waiting for the task: (returnval){ [ 734.731602] env[69328]: value = "task-3273091" [ 734.731602] 
env[69328]: _type = "Task" [ 734.731602] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.741324] env[69328]: DEBUG oslo_vmware.api [None req-af1ea56b-1ba2-47fb-9021-e84f5b1f1c5b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Task: {'id': task-3273091, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.779176] env[69328]: DEBUG oslo_vmware.api [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': task-3273087, 'name': Rename_Task, 'duration_secs': 0.146071} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.780158] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 734.780158] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-54dd8648-aa50-4f03-8a54-d444f6bc22c5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.787989] env[69328]: DEBUG oslo_vmware.api [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Waiting for the task: (returnval){ [ 734.787989] env[69328]: value = "task-3273092" [ 734.787989] env[69328]: _type = "Task" [ 734.787989] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.801539] env[69328]: DEBUG nova.compute.manager [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 734.807376] env[69328]: DEBUG oslo_vmware.api [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': task-3273092, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.885599] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3edc6047-b253-4bb7-aa62-6147c37c07ea {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.900048] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e08e5bd1-41dc-4968-b14b-5654d054c0a9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.938612] env[69328]: DEBUG nova.compute.manager [req-db5ade1a-6b4d-4396-94ac-6bc9c43b1fad req-048e45c4-0be7-4009-b9df-0228ff45dfec service nova] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Detach interface failed, port_id=a1ba4448-3cab-4866-81f4-785bd26580b2, reason: Instance 4c54c0dd-32f1-4d35-b770-3e1a540c54a7 could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 735.077268] env[69328]: DEBUG oslo_vmware.api [None req-f9e274ee-1c49-4fd0-91b5-055e0e13c669 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Task: {'id': task-3273089, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150233} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.077544] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9e274ee-1c49-4fd0-91b5-055e0e13c669 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 735.077731] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f9e274ee-1c49-4fd0-91b5-055e0e13c669 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 735.077932] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f9e274ee-1c49-4fd0-91b5-055e0e13c669 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 735.078159] env[69328]: INFO nova.compute.manager [None req-f9e274ee-1c49-4fd0-91b5-055e0e13c669 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Took 1.12 seconds to destroy the instance on the hypervisor. [ 735.078407] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f9e274ee-1c49-4fd0-91b5-055e0e13c669 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 735.078615] env[69328]: DEBUG nova.compute.manager [-] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 735.078758] env[69328]: DEBUG nova.network.neutron [-] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 735.246636] env[69328]: DEBUG oslo_vmware.api [None req-af1ea56b-1ba2-47fb-9021-e84f5b1f1c5b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Task: {'id': task-3273091, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134575} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.246905] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-af1ea56b-1ba2-47fb-9021-e84f5b1f1c5b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 735.247813] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-af1ea56b-1ba2-47fb-9021-e84f5b1f1c5b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 735.247813] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-af1ea56b-1ba2-47fb-9021-e84f5b1f1c5b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 735.247813] env[69328]: INFO nova.compute.manager [None req-af1ea56b-1ba2-47fb-9021-e84f5b1f1c5b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Took 2.15 seconds to destroy the instance on the hypervisor. [ 735.247813] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-af1ea56b-1ba2-47fb-9021-e84f5b1f1c5b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 735.248957] env[69328]: DEBUG nova.compute.manager [-] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 735.248957] env[69328]: DEBUG nova.network.neutron [-] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 735.304920] env[69328]: DEBUG oslo_vmware.api [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': task-3273092, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.404100] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1024329b-a6b2-4406-ad1f-1777cb1fc054 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.413289] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0465ab76-9908-4c5f-a5f1-a7a73f219860 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.455078] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9100d258-0619-47d9-9ca8-ebb29f799267 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.464344] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94aef9d9-0996-4560-be7e-efb89ec72c44 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.485676] env[69328]: DEBUG nova.compute.provider_tree [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 735.804899] env[69328]: DEBUG oslo_vmware.api [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': task-3273092, 'name': PowerOnVM_Task, 'duration_secs': 0.564484} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.805535] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 735.805948] env[69328]: INFO nova.compute.manager [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Took 6.46 seconds to spawn the instance on the hypervisor. [ 735.806181] env[69328]: DEBUG nova.compute.manager [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 735.808496] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0171335-7438-4f4e-8646-3a2510694462 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.813437] env[69328]: DEBUG nova.compute.manager [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 735.841473] env[69328]: DEBUG nova.virt.hardware [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 735.841718] env[69328]: DEBUG nova.virt.hardware [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 735.841894] env[69328]: DEBUG nova.virt.hardware [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 735.842098] env[69328]: DEBUG nova.virt.hardware [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 735.842242] env[69328]: DEBUG nova.virt.hardware [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 735.842383] env[69328]: DEBUG nova.virt.hardware [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 735.842585] env[69328]: DEBUG nova.virt.hardware [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 735.842736] env[69328]: DEBUG nova.virt.hardware [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 735.842894] env[69328]: DEBUG nova.virt.hardware [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 735.843836] env[69328]: DEBUG nova.virt.hardware [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 735.844237] env[69328]: DEBUG nova.virt.hardware [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 735.847786] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a781bd8f-e68e-46d7-a291-e32749822522 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.863630] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd0c70dd-ba98-4452-8db6-6a9e632df28b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.940664] env[69328]: DEBUG nova.compute.manager [req-49f2f2a0-70bd-4858-9225-b4db589b9f71 req-6fd1b25d-3e38-48f1-a415-341da97e141b service nova] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Received event network-vif-deleted-c9eb2309-c7e0-43ff-91b1-763055d9381a {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 735.941453] env[69328]: INFO nova.compute.manager [req-49f2f2a0-70bd-4858-9225-b4db589b9f71 req-6fd1b25d-3e38-48f1-a415-341da97e141b service nova] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Neutron deleted interface c9eb2309-c7e0-43ff-91b1-763055d9381a; detaching it from the instance and deleting it from the info cache [ 735.941789] env[69328]: DEBUG nova.network.neutron [req-49f2f2a0-70bd-4858-9225-b4db589b9f71 req-6fd1b25d-3e38-48f1-a415-341da97e141b service nova] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.990878] env[69328]: DEBUG nova.scheduler.client.report [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 736.042269] env[69328]: DEBUG nova.compute.manager 
[req-dffbc31a-23e0-4e00-a8a5-7f9dbc425c45 req-27465bcb-fbe4-4844-ace1-9724d6d532de service nova] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Received event network-vif-deleted-a95af8f2-189b-449d-974d-b380402c6a3f {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 736.042702] env[69328]: INFO nova.compute.manager [req-dffbc31a-23e0-4e00-a8a5-7f9dbc425c45 req-27465bcb-fbe4-4844-ace1-9724d6d532de service nova] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Neutron deleted interface a95af8f2-189b-449d-974d-b380402c6a3f; detaching it from the instance and deleting it from the info cache [ 736.042702] env[69328]: DEBUG nova.network.neutron [req-dffbc31a-23e0-4e00-a8a5-7f9dbc425c45 req-27465bcb-fbe4-4844-ace1-9724d6d532de service nova] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 736.286516] env[69328]: DEBUG nova.network.neutron [-] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 736.329442] env[69328]: DEBUG nova.network.neutron [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Successfully updated port: 1b80d882-8edf-4d06-a91a-6568cce981e8 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 736.331078] env[69328]: INFO nova.compute.manager [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Took 50.50 seconds to build instance. [ 736.369979] env[69328]: DEBUG nova.network.neutron [-] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 736.446172] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c3ae26e9-a475-477e-bd0e-3fc9bbc4935a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.461898] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-515ebae6-5960-47c4-8561-1fae89ed47b3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.506565] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.721s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 736.508021] env[69328]: DEBUG nova.compute.manager [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 736.511065] env[69328]: DEBUG nova.compute.manager [req-49f2f2a0-70bd-4858-9225-b4db589b9f71 req-6fd1b25d-3e38-48f1-a415-341da97e141b service nova] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Detach interface failed, port_id=c9eb2309-c7e0-43ff-91b1-763055d9381a, reason: Instance e5d3df12-5334-44c8-9a44-1674e57918bb could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 736.511522] env[69328]: DEBUG oslo_concurrency.lockutils [None req-af64df0c-6b85-49fd-a4d1-e6f242033274 tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.011s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 736.511724] env[69328]: DEBUG nova.objects.instance [None req-af64df0c-6b85-49fd-a4d1-e6f242033274 tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Lazy-loading 'resources' on Instance uuid b7409a67-c140-436f-9c4e-27dae259f648 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 736.549034] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1a3e26b5-3763-4ccc-af4f-bec92a2b86f3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.560831] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b3a009-5048-47bd-a8dc-41328658e692 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.596915] env[69328]: DEBUG nova.compute.manager [req-dffbc31a-23e0-4e00-a8a5-7f9dbc425c45 req-27465bcb-fbe4-4844-ace1-9724d6d532de service nova] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Detach interface failed, port_id=a95af8f2-189b-449d-974d-b380402c6a3f, reason: Instance 46526210-2783-408d-9ecb-773f33ff0c66 could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 736.790952] env[69328]: INFO nova.compute.manager [-] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Took 1.54 seconds to deallocate network for instance. 
[ 736.834267] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Acquiring lock "refresh_cache-3923403b-2e8f-4033-89ee-9a907aff1d49" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.834553] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Acquired lock "refresh_cache-3923403b-2e8f-4033-89ee-9a907aff1d49" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 736.834656] env[69328]: DEBUG nova.network.neutron [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 736.835896] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f67ea4ff-c329-48ed-8668-03914fe9ddc9 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Lock "732342ea-2f73-40ea-a826-883ddc7a385a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 89.106s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 736.872234] env[69328]: INFO nova.compute.manager [-] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Took 1.79 seconds to deallocate network for instance. [ 737.014579] env[69328]: DEBUG nova.compute.utils [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 737.018754] env[69328]: DEBUG nova.compute.manager [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 737.019116] env[69328]: DEBUG nova.network.neutron [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 737.092022] env[69328]: DEBUG nova.policy [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '43be625728f24af5a2f6a650279d689d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cdc479a290524130b9d17e627a64b65a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 737.299254] env[69328]: DEBUG oslo_concurrency.lockutils [None req-af1ea56b-1ba2-47fb-9021-e84f5b1f1c5b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 737.342768] env[69328]: DEBUG nova.compute.manager [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 737.378468] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f9e274ee-1c49-4fd0-91b5-055e0e13c669 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 737.405711] env[69328]: DEBUG nova.network.neutron [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 737.436113] env[69328]: DEBUG oslo_concurrency.lockutils [None req-51927333-290d-4571-bbf0-9e7e32968770 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Acquiring lock "bbbfb48d-b474-4a6e-9078-336f23d2c343" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 737.436197] env[69328]: DEBUG oslo_concurrency.lockutils [None req-51927333-290d-4571-bbf0-9e7e32968770 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Lock "bbbfb48d-b474-4a6e-9078-336f23d2c343" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 737.437073] env[69328]: DEBUG oslo_concurrency.lockutils [None req-51927333-290d-4571-bbf0-9e7e32968770 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Acquiring lock "bbbfb48d-b474-4a6e-9078-336f23d2c343-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 737.437073] env[69328]: DEBUG oslo_concurrency.lockutils [None req-51927333-290d-4571-bbf0-9e7e32968770 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Lock "bbbfb48d-b474-4a6e-9078-336f23d2c343-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 737.437073] env[69328]: DEBUG oslo_concurrency.lockutils [None req-51927333-290d-4571-bbf0-9e7e32968770 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Lock "bbbfb48d-b474-4a6e-9078-336f23d2c343-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 737.446013] env[69328]: INFO nova.compute.manager [None req-51927333-290d-4571-bbf0-9e7e32968770 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Terminating instance [ 737.521601] env[69328]: DEBUG nova.compute.manager [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Start building block device mappings for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 737.633112] env[69328]: INFO nova.compute.manager [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Rebuilding instance [ 737.673020] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32ea8a5d-764e-4357-acd3-de3873b01a4a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.687055] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f08962a5-1a20-4ac3-aabb-8c0b089e1c36 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.691012] env[69328]: DEBUG nova.network.neutron [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Successfully created port: bdb8bb73-99e9-4ac7-95a7-50bf7fd362a7 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 737.698035] env[69328]: DEBUG nova.compute.manager [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 737.698035] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e53c2160-ba90-41cc-827e-10cbed11b349 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.729164] env[69328]: DEBUG nova.network.neutron [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Updating instance_info_cache with network_info: [{"id": "1b80d882-8edf-4d06-a91a-6568cce981e8", "address": "fa:16:3e:32:ef:98", "network": {"id": "2060ab72-61b3-4aea-bcdd-0b8a76a11fc7", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1995944610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65edf1e9f4344038878d05021bbdef78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b80d882-8e", "ovs_interfaceid": "1b80d882-8edf-4d06-a91a-6568cce981e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.731387] env[69328]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89561f68-66ad-4bda-a7d3-db95448773b0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.745646] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df6deb7b-6924-45cc-8dad-f302eeb14644 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.764471] env[69328]: DEBUG nova.compute.provider_tree [None req-af64df0c-6b85-49fd-a4d1-e6f242033274 tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 737.867673] env[69328]: DEBUG oslo_concurrency.lockutils [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 737.956975] env[69328]: DEBUG nova.compute.manager [None req-51927333-290d-4571-bbf0-9e7e32968770 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 737.957286] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-51927333-290d-4571-bbf0-9e7e32968770 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 737.958181] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3857f63-dd00-4e65-bb3a-9adca8e1ac85 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.967028] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-51927333-290d-4571-bbf0-9e7e32968770 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 737.967028] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-648b8912-e536-45ff-9351-a6e0564e2cc3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.975042] env[69328]: DEBUG oslo_vmware.api [None req-51927333-290d-4571-bbf0-9e7e32968770 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Waiting for the task: (returnval){ [ 737.975042] env[69328]: value = "task-3273093" [ 737.975042] env[69328]: _type = "Task" [ 737.975042] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.986035] env[69328]: DEBUG oslo_vmware.api [None req-51927333-290d-4571-bbf0-9e7e32968770 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3273093, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.149311] env[69328]: DEBUG nova.compute.manager [req-26cc8730-835a-4798-869f-9396074e3c06 req-32f7c878-f664-404f-92ac-2c57bc87bb46 service nova] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Received event network-vif-plugged-1b80d882-8edf-4d06-a91a-6568cce981e8 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 738.149311] env[69328]: DEBUG oslo_concurrency.lockutils [req-26cc8730-835a-4798-869f-9396074e3c06 req-32f7c878-f664-404f-92ac-2c57bc87bb46 service nova] Acquiring lock "3923403b-2e8f-4033-89ee-9a907aff1d49-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 738.149311] env[69328]: DEBUG oslo_concurrency.lockutils [req-26cc8730-835a-4798-869f-9396074e3c06 req-32f7c878-f664-404f-92ac-2c57bc87bb46 service nova] Lock "3923403b-2e8f-4033-89ee-9a907aff1d49-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 738.149311] env[69328]: DEBUG oslo_concurrency.lockutils [req-26cc8730-835a-4798-869f-9396074e3c06 req-32f7c878-f664-404f-92ac-2c57bc87bb46 service nova] Lock "3923403b-2e8f-4033-89ee-9a907aff1d49-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 738.149311] env[69328]: DEBUG nova.compute.manager [req-26cc8730-835a-4798-869f-9396074e3c06 req-32f7c878-f664-404f-92ac-2c57bc87bb46 service nova] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] No waiting events found dispatching network-vif-plugged-1b80d882-8edf-4d06-a91a-6568cce981e8 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 738.149701] env[69328]: WARNING nova.compute.manager [req-26cc8730-835a-4798-869f-9396074e3c06 req-32f7c878-f664-404f-92ac-2c57bc87bb46 service nova] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Received unexpected event network-vif-plugged-1b80d882-8edf-4d06-a91a-6568cce981e8 for instance with vm_state building and task_state spawning. [ 738.149701] env[69328]: DEBUG nova.compute.manager [req-26cc8730-835a-4798-869f-9396074e3c06 req-32f7c878-f664-404f-92ac-2c57bc87bb46 service nova] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Received event network-changed-1b80d882-8edf-4d06-a91a-6568cce981e8 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 738.149701] env[69328]: DEBUG nova.compute.manager [req-26cc8730-835a-4798-869f-9396074e3c06 req-32f7c878-f664-404f-92ac-2c57bc87bb46 service nova] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Refreshing instance network info cache due to event network-changed-1b80d882-8edf-4d06-a91a-6568cce981e8. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 738.152049] env[69328]: DEBUG oslo_concurrency.lockutils [req-26cc8730-835a-4798-869f-9396074e3c06 req-32f7c878-f664-404f-92ac-2c57bc87bb46 service nova] Acquiring lock "refresh_cache-3923403b-2e8f-4033-89ee-9a907aff1d49" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.197368] env[69328]: DEBUG oslo_concurrency.lockutils [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Acquiring lock "ef7effe4-b37f-4fab-ad24-9d8f72a47ee2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 738.197368] env[69328]: DEBUG oslo_concurrency.lockutils [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Lock "ef7effe4-b37f-4fab-ad24-9d8f72a47ee2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 738.239413] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Releasing lock "refresh_cache-3923403b-2e8f-4033-89ee-9a907aff1d49" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 738.239765] env[69328]: DEBUG nova.compute.manager [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Instance network_info: |[{"id": "1b80d882-8edf-4d06-a91a-6568cce981e8", "address": "fa:16:3e:32:ef:98", "network": {"id": "2060ab72-61b3-4aea-bcdd-0b8a76a11fc7", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1995944610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65edf1e9f4344038878d05021bbdef78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b80d882-8e", "ovs_interfaceid": "1b80d882-8edf-4d06-a91a-6568cce981e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 738.241818] env[69328]: DEBUG oslo_concurrency.lockutils [req-26cc8730-835a-4798-869f-9396074e3c06 req-32f7c878-f664-404f-92ac-2c57bc87bb46 service nova] Acquired lock "refresh_cache-3923403b-2e8f-4033-89ee-9a907aff1d49" 
{{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 738.242118] env[69328]: DEBUG nova.network.neutron [req-26cc8730-835a-4798-869f-9396074e3c06 req-32f7c878-f664-404f-92ac-2c57bc87bb46 service nova] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Refreshing network info cache for port 1b80d882-8edf-4d06-a91a-6568cce981e8 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 738.243252] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:32:ef:98', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '41278529-8bd2-44a1-97c8-03967faa3ff7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1b80d882-8edf-4d06-a91a-6568cce981e8', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 738.250897] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 738.254936] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 738.255236] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 738.255697] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-626265c6-3085-4eeb-b76a-7d6762678601 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.257458] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dac7576b-bcd5-435e-b6cd-dd9a9c4e8bf5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.273345] env[69328]: DEBUG nova.scheduler.client.report [None req-af64df0c-6b85-49fd-a4d1-e6f242033274 tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 738.284785] env[69328]: DEBUG oslo_vmware.api [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 
tempest-ServerShowV257Test-890624642-project-member] Waiting for the task: (returnval){ [ 738.284785] env[69328]: value = "task-3273094" [ 738.284785] env[69328]: _type = "Task" [ 738.284785] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.285169] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 738.285169] env[69328]: value = "task-3273095" [ 738.285169] env[69328]: _type = "Task" [ 738.285169] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.298525] env[69328]: DEBUG oslo_vmware.api [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': task-3273094, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.303458] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273095, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.486295] env[69328]: DEBUG oslo_vmware.api [None req-51927333-290d-4571-bbf0-9e7e32968770 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3273093, 'name': PowerOffVM_Task, 'duration_secs': 0.180849} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.486664] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-51927333-290d-4571-bbf0-9e7e32968770 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 738.486851] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-51927333-290d-4571-bbf0-9e7e32968770 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 738.487146] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bbfb5018-4950-481b-b7ec-c1c1e8c667bb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.534793] env[69328]: DEBUG nova.compute.manager [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 738.551806] env[69328]: DEBUG nova.network.neutron [req-26cc8730-835a-4798-869f-9396074e3c06 req-32f7c878-f664-404f-92ac-2c57bc87bb46 service nova] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Updated VIF entry in instance network info cache for port 1b80d882-8edf-4d06-a91a-6568cce981e8. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 738.552371] env[69328]: DEBUG nova.network.neutron [req-26cc8730-835a-4798-869f-9396074e3c06 req-32f7c878-f664-404f-92ac-2c57bc87bb46 service nova] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Updating instance_info_cache with network_info: [{"id": "1b80d882-8edf-4d06-a91a-6568cce981e8", "address": "fa:16:3e:32:ef:98", "network": {"id": "2060ab72-61b3-4aea-bcdd-0b8a76a11fc7", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1995944610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65edf1e9f4344038878d05021bbdef78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b80d882-8e", "ovs_interfaceid": "1b80d882-8edf-4d06-a91a-6568cce981e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.560225] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-51927333-290d-4571-bbf0-9e7e32968770 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 738.560457] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-51927333-290d-4571-bbf0-9e7e32968770 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 738.560638] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-51927333-290d-4571-bbf0-9e7e32968770 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Deleting the datastore file [datastore2] bbbfb48d-b474-4a6e-9078-336f23d2c343 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 738.560912] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-64ee8b05-7bd3-4bd4-9545-92a0ec695616 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.571656] env[69328]: DEBUG oslo_vmware.api [None req-51927333-290d-4571-bbf0-9e7e32968770 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Waiting for the task: (returnval){ [ 738.571656] env[69328]: value = "task-3273097" [ 738.571656] env[69328]: _type = "Task" [ 738.571656] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.575202] env[69328]: DEBUG nova.virt.hardware [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 738.575202] env[69328]: DEBUG nova.virt.hardware [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 738.575202] env[69328]: DEBUG nova.virt.hardware [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 738.575414] env[69328]: DEBUG nova.virt.hardware [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 738.575478] env[69328]: DEBUG nova.virt.hardware [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 738.575632] env[69328]: DEBUG nova.virt.hardware [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 738.575849] env[69328]: DEBUG nova.virt.hardware [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 738.576059] env[69328]: DEBUG nova.virt.hardware [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 738.576256] env[69328]: DEBUG nova.virt.hardware 
[None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 738.576429] env[69328]: DEBUG nova.virt.hardware [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 738.576918] env[69328]: DEBUG nova.virt.hardware [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 738.577772] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-885173d6-1f15-4937-a7f4-405388df7850 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.600228] env[69328]: DEBUG oslo_vmware.api [None req-51927333-290d-4571-bbf0-9e7e32968770 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3273097, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.601663] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-089dc6ee-0abb-4f82-809a-937f45d8c969 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.778856] env[69328]: DEBUG oslo_concurrency.lockutils [None req-af64df0c-6b85-49fd-a4d1-e6f242033274 tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.267s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 738.782323] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.718s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 738.784942] env[69328]: INFO nova.compute.claims [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 738.799610] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273095, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.803616] env[69328]: DEBUG oslo_vmware.api [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': task-3273094, 'name': PowerOffVM_Task, 'duration_secs': 0.180597} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.803616] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 738.803616] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 738.804414] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73b119de-9e92-4798-9236-7e319415a6c2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.808339] env[69328]: INFO nova.scheduler.client.report [None req-af64df0c-6b85-49fd-a4d1-e6f242033274 tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Deleted allocations for instance b7409a67-c140-436f-9c4e-27dae259f648 [ 738.815372] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 738.815618] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b1ec2e28-17bc-4198-bf1d-3c545b323014 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.846196] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 738.846196] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 738.846196] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Deleting the datastore file [datastore2] 732342ea-2f73-40ea-a826-883ddc7a385a {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 738.846196] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-dcf65a8b-c890-408d-9b08-75bf4f3f05e9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.856455] env[69328]: DEBUG oslo_vmware.api [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Waiting for the task: (returnval){ [ 738.856455] env[69328]: value = "task-3273099" [ 738.856455] env[69328]: _type = "Task" [ 738.856455] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.867353] env[69328]: DEBUG oslo_vmware.api [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': task-3273099, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.055396] env[69328]: DEBUG oslo_concurrency.lockutils [req-26cc8730-835a-4798-869f-9396074e3c06 req-32f7c878-f664-404f-92ac-2c57bc87bb46 service nova] Releasing lock "refresh_cache-3923403b-2e8f-4033-89ee-9a907aff1d49" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 739.088052] env[69328]: DEBUG oslo_vmware.api [None req-51927333-290d-4571-bbf0-9e7e32968770 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3273097, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138036} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.088329] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-51927333-290d-4571-bbf0-9e7e32968770 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 739.088507] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-51927333-290d-4571-bbf0-9e7e32968770 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 739.088679] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-51927333-290d-4571-bbf0-9e7e32968770 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 739.090177] env[69328]: INFO nova.compute.manager [None req-51927333-290d-4571-bbf0-9e7e32968770 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Took 1.13 seconds to destroy the instance on the hypervisor. [ 739.090177] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-51927333-290d-4571-bbf0-9e7e32968770 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 739.090177] env[69328]: DEBUG nova.compute.manager [-] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 739.090177] env[69328]: DEBUG nova.network.neutron [-] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 739.299573] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273095, 'name': CreateVM_Task, 'duration_secs': 0.757151} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.299746] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 739.300433] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.300694] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 739.300928] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 739.301294] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27fd906c-ac17-4d96-9db7-2a598fda50d7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.306198] env[69328]: DEBUG oslo_vmware.api [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Waiting for the task: (returnval){ [ 739.306198] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]524bb344-f004-9e90-582c-d0c2641bc59e" [ 739.306198] env[69328]: _type = "Task" [ 739.306198] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.317337] env[69328]: DEBUG oslo_vmware.api [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]524bb344-f004-9e90-582c-d0c2641bc59e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.320254] env[69328]: DEBUG oslo_concurrency.lockutils [None req-af64df0c-6b85-49fd-a4d1-e6f242033274 tempest-ServerMetadataTestJSON-1361883546 tempest-ServerMetadataTestJSON-1361883546-project-member] Lock "b7409a67-c140-436f-9c4e-27dae259f648" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.833s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 739.366687] env[69328]: DEBUG oslo_vmware.api [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': task-3273099, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.092416} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.366956] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 739.367154] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 739.367330] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 739.521182] env[69328]: DEBUG nova.network.neutron [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Successfully updated port: bdb8bb73-99e9-4ac7-95a7-50bf7fd362a7 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 739.826862] env[69328]: DEBUG oslo_vmware.api [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]524bb344-f004-9e90-582c-d0c2641bc59e, 'name': SearchDatastore_Task, 'duration_secs': 0.023596} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.831672] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 739.831964] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 739.832232] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.832425] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 739.832609] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 739.833038] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-00fc50c2-94ef-4b1e-a348-9e30cdeeaa4b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.843422] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 739.843422] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 739.844127] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7efc5829-80c6-4012-8a35-49d2c5781652 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.857711] env[69328]: DEBUG oslo_vmware.api [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Waiting for the task: (returnval){ [ 739.857711] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b26545-1059-98d1-77f0-5e901b3d0001" [ 739.857711] env[69328]: _type = "Task" [ 739.857711] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.877148] env[69328]: DEBUG oslo_vmware.api [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b26545-1059-98d1-77f0-5e901b3d0001, 'name': SearchDatastore_Task, 'duration_secs': 0.010313} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.877148] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-415f7313-ab22-414c-8bd8-15e04f01c7d7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.886182] env[69328]: DEBUG oslo_vmware.api [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Waiting for the task: (returnval){ [ 739.886182] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e80984-92c5-367b-8769-b8bc97b17e76" [ 739.886182] env[69328]: _type = "Task" [ 739.886182] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.894567] env[69328]: DEBUG oslo_vmware.api [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e80984-92c5-367b-8769-b8bc97b17e76, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.013268] env[69328]: DEBUG nova.network.neutron [-] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.024034] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "refresh_cache-c465c53f-d96b-461b-b8ff-b19929b4f789" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.024268] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquired lock "refresh_cache-c465c53f-d96b-461b-b8ff-b19929b4f789" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 740.024430] env[69328]: DEBUG nova.network.neutron [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 740.313889] env[69328]: DEBUG nova.compute.manager [req-007f60fb-c1ff-489a-ae39-e74b3512e4ca req-b6ec3ea6-c444-41d6-a4ad-90ce0f509b1c service nova] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Received event network-vif-plugged-bdb8bb73-99e9-4ac7-95a7-50bf7fd362a7 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 740.314241] env[69328]: DEBUG oslo_concurrency.lockutils [req-007f60fb-c1ff-489a-ae39-e74b3512e4ca req-b6ec3ea6-c444-41d6-a4ad-90ce0f509b1c service nova] Acquiring lock "c465c53f-d96b-461b-b8ff-b19929b4f789-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 740.314359] env[69328]: DEBUG oslo_concurrency.lockutils [req-007f60fb-c1ff-489a-ae39-e74b3512e4ca req-b6ec3ea6-c444-41d6-a4ad-90ce0f509b1c service nova] Lock "c465c53f-d96b-461b-b8ff-b19929b4f789-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 740.314507] env[69328]: DEBUG oslo_concurrency.lockutils [req-007f60fb-c1ff-489a-ae39-e74b3512e4ca req-b6ec3ea6-c444-41d6-a4ad-90ce0f509b1c service nova] Lock "c465c53f-d96b-461b-b8ff-b19929b4f789-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 740.314668] env[69328]: DEBUG nova.compute.manager [req-007f60fb-c1ff-489a-ae39-e74b3512e4ca req-b6ec3ea6-c444-41d6-a4ad-90ce0f509b1c service nova] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] No waiting events found dispatching network-vif-plugged-bdb8bb73-99e9-4ac7-95a7-50bf7fd362a7 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 740.314821] env[69328]: WARNING nova.compute.manager [req-007f60fb-c1ff-489a-ae39-e74b3512e4ca 
req-b6ec3ea6-c444-41d6-a4ad-90ce0f509b1c service nova] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Received unexpected event network-vif-plugged-bdb8bb73-99e9-4ac7-95a7-50bf7fd362a7 for instance with vm_state building and task_state spawning. [ 740.314976] env[69328]: DEBUG nova.compute.manager [req-007f60fb-c1ff-489a-ae39-e74b3512e4ca req-b6ec3ea6-c444-41d6-a4ad-90ce0f509b1c service nova] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Received event network-vif-deleted-f66bf51c-4ffe-4da8-a8d8-6b3db6ee56e0 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 740.315154] env[69328]: DEBUG nova.compute.manager [req-007f60fb-c1ff-489a-ae39-e74b3512e4ca req-b6ec3ea6-c444-41d6-a4ad-90ce0f509b1c service nova] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Received event network-changed-bdb8bb73-99e9-4ac7-95a7-50bf7fd362a7 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 740.315335] env[69328]: DEBUG nova.compute.manager [req-007f60fb-c1ff-489a-ae39-e74b3512e4ca req-b6ec3ea6-c444-41d6-a4ad-90ce0f509b1c service nova] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Refreshing instance network info cache due to event network-changed-bdb8bb73-99e9-4ac7-95a7-50bf7fd362a7. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 740.315455] env[69328]: DEBUG oslo_concurrency.lockutils [req-007f60fb-c1ff-489a-ae39-e74b3512e4ca req-b6ec3ea6-c444-41d6-a4ad-90ce0f509b1c service nova] Acquiring lock "refresh_cache-c465c53f-d96b-461b-b8ff-b19929b4f789" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.351021] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5b93cf7-33e0-41fb-99dc-f40e8f30b401 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.357845] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe6cc326-ae8c-468c-8caa-fb2e94f88d35 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.396585] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e2209aa-1243-4c96-aeca-b62c4a63ab4c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.410282] env[69328]: DEBUG oslo_vmware.api [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e80984-92c5-367b-8769-b8bc97b17e76, 'name': SearchDatastore_Task, 'duration_secs': 0.010067} completed successfully. 
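Note: the WARNING above ("Received unexpected event network-vif-plugged-... for instance with vm_state building and task_state spawning") reflects the external-event handshake: Neutron notifies Nova when a VIF is plugged, and the compute manager either hands the event to a waiter that registered for it or reports it as unexpected, as here. A rough standalone sketch of that dispatch, with hypothetical names (InstanceEventBox, pop_event) rather than Nova's actual classes:

# Sketch of the "waiting events" dispatch implied by the log: incoming events
# are matched against registered waiters; unmatched events are just reported.
import threading

class InstanceEventBox:
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}              # event name -> threading.Event

    def prepare_for(self, name):
        with self._lock:
            return self._waiters.setdefault(name, threading.Event())

    def pop_event(self, name):
        with self._lock:
            return self._waiters.pop(name, None)

box = InstanceEventBox()
waiter = box.pop_event('network-vif-plugged-bdb8bb73-99e9-4ac7-95a7-50bf7fd362a7')
if waiter is None:
    print('Received unexpected event; no waiter registered')
else:
    waiter.set()
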
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.413490] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 740.413772] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 3923403b-2e8f-4033-89ee-9a907aff1d49/3923403b-2e8f-4033-89ee-9a907aff1d49.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 740.414484] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d30d9a7d-d54d-442d-a04e-000a1a62cf44 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.422379] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9f5646b-856f-48bd-85b2-ec2087a82e06 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.436764] env[69328]: DEBUG nova.compute.provider_tree [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 740.441075] env[69328]: DEBUG nova.virt.hardware [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 740.441329] env[69328]: DEBUG nova.virt.hardware [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 740.441569] env[69328]: DEBUG nova.virt.hardware [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 
tempest-ServerShowV257Test-890624642-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 740.441654] env[69328]: DEBUG nova.virt.hardware [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 740.441797] env[69328]: DEBUG nova.virt.hardware [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 740.441967] env[69328]: DEBUG nova.virt.hardware [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 740.442223] env[69328]: DEBUG nova.virt.hardware [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 740.442384] env[69328]: DEBUG nova.virt.hardware [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 740.442549] env[69328]: DEBUG nova.virt.hardware [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 740.442705] env[69328]: DEBUG nova.virt.hardware [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 740.442874] env[69328]: DEBUG nova.virt.hardware [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 740.443381] env[69328]: DEBUG oslo_vmware.api [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Waiting for the task: (returnval){ [ 740.443381] env[69328]: value = "task-3273100" [ 740.443381] env[69328]: _type = "Task" [ 740.443381] env[69328]: } to complete. 
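Note: the nova.virt.hardware lines above walk through CPU topology selection for the m1.nano flavor: with no flavor or image limits or preferences set (0:0:0), the limits fall back to 65536 sockets/cores/threads, and for a single vCPU the only possible topology is 1 socket x 1 core x 1 thread. A small illustrative calculation of the "possible topologies" step (a simplification of the real constraint logic, not Nova's actual code):

# Enumerate (sockets, cores, threads) combinations whose product equals the
# vCPU count and which stay within the given maxima -- the idea behind the
# "Build topologies for 1 vcpu(s) 1:1:1" / "Got 1 possible topologies" lines.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topos = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                topos.append((sockets, cores, threads))
    return topos

print(possible_topologies(1))   # [(1, 1, 1)]
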
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.445636] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a48c538b-43d6-4cec-8047-662a9833182f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.458633] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da26c8f1-39c4-4b5f-82ee-3ce3ca7a5f97 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.463293] env[69328]: DEBUG oslo_vmware.api [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': task-3273100, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.473458] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Instance VIF info [] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 740.479175] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 740.479386] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 740.479606] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0ebe8cc2-9752-45cc-83ee-a7e00806790c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.498176] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 740.498176] env[69328]: value = "task-3273101" [ 740.498176] env[69328]: _type = "Task" [ 740.498176] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.509048] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273101, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.515784] env[69328]: INFO nova.compute.manager [-] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Took 1.43 seconds to deallocate network for instance. [ 740.589257] env[69328]: DEBUG nova.network.neutron [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Instance cache missing network info. 
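Note: "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" above is the looping-call helper keeping the spawn path responsive while the VM-creation call runs to completion. A rough standalone analogue using plain threads (not the eventlet-based backend the log refers to; the sleep values and return value are placeholders):

# Run a long call in a worker and wait for its result -- a stand-in for the
# "Waiting for function ... to return" pattern in the log.
from concurrent.futures import ThreadPoolExecutor
import time

def create_vm():
    time.sleep(0.3)              # stands in for the CreateVM_Task round trip
    return 'vm-ref-123'          # illustrative return value

with ThreadPoolExecutor(max_workers=1) as pool:
    future = pool.submit(create_vm)
    while not future.done():     # the caller can poll or log progress meanwhile
        time.sleep(0.1)
    print('created', future.result())
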
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 740.855883] env[69328]: DEBUG nova.network.neutron [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Updating instance_info_cache with network_info: [{"id": "bdb8bb73-99e9-4ac7-95a7-50bf7fd362a7", "address": "fa:16:3e:30:7b:7f", "network": {"id": "620f277d-ca49-41da-83a0-afb8c393e26e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1223326239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdc479a290524130b9d17e627a64b65a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbdb8bb73-99", "ovs_interfaceid": "bdb8bb73-99e9-4ac7-95a7-50bf7fd362a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.947895] env[69328]: DEBUG nova.scheduler.client.report [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 740.961916] env[69328]: DEBUG oslo_vmware.api [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': task-3273100, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.472503} completed successfully. 
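Note: the scheduler report above compares the resource provider's inventory against placement and finds it unchanged. The useful reading of that payload is how usable capacity is derived: (total - reserved) * allocation_ratio per resource class. A small worked example using the exact figures from the log:

# Effective capacity per resource class, using the inventory shown above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
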
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.962224] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 3923403b-2e8f-4033-89ee-9a907aff1d49/3923403b-2e8f-4033-89ee-9a907aff1d49.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 740.962477] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 740.962772] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-af15d2c0-c6ab-4b43-96c5-b724eef2dbc7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.971558] env[69328]: DEBUG oslo_vmware.api [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Waiting for the task: (returnval){ [ 740.971558] env[69328]: value = "task-3273102" [ 740.971558] env[69328]: _type = "Task" [ 740.971558] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.987021] env[69328]: DEBUG oslo_vmware.api [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': task-3273102, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.008119] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273101, 'name': CreateVM_Task, 'duration_secs': 0.321159} completed successfully. 
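Note: "Extending root virtual disk to 1048576" above is the resize of the copied base image up to the flavor's root disk size; the m1.nano flavor has root_gb=1, and the extend call takes the size in KiB, where 1 GiB is 1 048 576 KiB. A one-line check of that conversion (the helper name is illustrative):

# 1 GiB root disk expressed in KiB, matching the "1048576" in the log.
def root_gb_to_kb(root_gb):
    return root_gb * 1024 * 1024

assert root_gb_to_kb(1) == 1048576
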
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.008265] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 741.008604] env[69328]: DEBUG oslo_concurrency.lockutils [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.008745] env[69328]: DEBUG oslo_concurrency.lockutils [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 741.011464] env[69328]: DEBUG oslo_concurrency.lockutils [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 741.011464] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1aaef263-618a-4110-89c9-084ea1e2ada5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.015124] env[69328]: DEBUG oslo_vmware.api [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Waiting for the task: (returnval){ [ 741.015124] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52777e8e-4426-4bf2-4c56-b7772a6a6366" [ 741.015124] env[69328]: _type = "Task" [ 741.015124] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.027023] env[69328]: DEBUG oslo_concurrency.lockutils [None req-51927333-290d-4571-bbf0-9e7e32968770 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 741.027023] env[69328]: DEBUG oslo_vmware.api [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52777e8e-4426-4bf2-4c56-b7772a6a6366, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.359131] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Releasing lock "refresh_cache-c465c53f-d96b-461b-b8ff-b19929b4f789" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 741.359454] env[69328]: DEBUG nova.compute.manager [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Instance network_info: |[{"id": "bdb8bb73-99e9-4ac7-95a7-50bf7fd362a7", "address": "fa:16:3e:30:7b:7f", "network": {"id": "620f277d-ca49-41da-83a0-afb8c393e26e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1223326239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdc479a290524130b9d17e627a64b65a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbdb8bb73-99", "ovs_interfaceid": "bdb8bb73-99e9-4ac7-95a7-50bf7fd362a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 741.360459] env[69328]: DEBUG oslo_concurrency.lockutils [req-007f60fb-c1ff-489a-ae39-e74b3512e4ca req-b6ec3ea6-c444-41d6-a4ad-90ce0f509b1c service nova] Acquired lock "refresh_cache-c465c53f-d96b-461b-b8ff-b19929b4f789" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 741.360459] env[69328]: DEBUG nova.network.neutron [req-007f60fb-c1ff-489a-ae39-e74b3512e4ca req-b6ec3ea6-c444-41d6-a4ad-90ce0f509b1c service nova] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Refreshing network info cache for port bdb8bb73-99e9-4ac7-95a7-50bf7fd362a7 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 741.361298] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:30:7b:7f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '86a35d07-53d3-46b3-92cb-ae34236c0f41', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bdb8bb73-99e9-4ac7-95a7-50bf7fd362a7', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 741.370483] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 
tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 741.373603] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 741.374123] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-833a6144-673b-4b92-b443-c7b4da259de9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.395639] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 741.395639] env[69328]: value = "task-3273103" [ 741.395639] env[69328]: _type = "Task" [ 741.395639] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.405011] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273103, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.456791] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.675s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 741.457368] env[69328]: DEBUG nova.compute.manager [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 741.460100] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d9988052-6a38-48dd-86f7-6ef56deaba6f tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.171s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 741.460337] env[69328]: DEBUG nova.objects.instance [None req-d9988052-6a38-48dd-86f7-6ef56deaba6f tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Lazy-loading 'resources' on Instance uuid a798c3f2-ccde-488e-8a14-21f4a04f8e12 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 741.482787] env[69328]: DEBUG oslo_vmware.api [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': task-3273102, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074544} completed successfully. 
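Note: the "Instance VIF info [{...}]" entry above shows the translation from the Neutron port held in the network info cache to the structure the VMware driver builds the VM with: the MAC address, the NSX logical-switch id as an opaque network reference, the port id as iface_id, and the vmxnet3 model. A schematic standalone version of that mapping (a plain dict transformation, not the driver's real helper):

# Map a cached Neutron VIF (as logged above) to the VMware-facing VIF info.
def vif_to_vmware_info(vif):
    return {
        'network_name': vif['network']['bridge'],                  # e.g. br-int
        'mac_address': vif['address'],
        'network_ref': {
            'type': 'OpaqueNetwork',
            'network-id': vif['details']['nsx-logical-switch-id'],
            'network-type': 'nsx.LogicalSwitch',
            'use-external-id': True,
        },
        'iface_id': vif['id'],
        'vif_model': 'vmxnet3',
    }

vif = {'id': 'bdb8bb73-99e9-4ac7-95a7-50bf7fd362a7',
       'address': 'fa:16:3e:30:7b:7f',
       'network': {'bridge': 'br-int'},
       'details': {'nsx-logical-switch-id': '86a35d07-53d3-46b3-92cb-ae34236c0f41'}}
print(vif_to_vmware_info(vif))
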
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.483337] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 741.484575] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09da4071-f496-493f-a30a-a6004c36a69f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.508871] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] 3923403b-2e8f-4033-89ee-9a907aff1d49/3923403b-2e8f-4033-89ee-9a907aff1d49.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 741.512127] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f02a4714-91ee-4ead-b49c-dad82f4dd9ec {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.538330] env[69328]: DEBUG oslo_vmware.api [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52777e8e-4426-4bf2-4c56-b7772a6a6366, 'name': SearchDatastore_Task, 'duration_secs': 0.012654} completed successfully. 
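Note: "Reconfiguring VM instance instance-00000025 to attach disk ... with type sparse" above is the step that wires the copied VMDK into the otherwise disk-less VM via ReconfigVM_Task. A schematic description of the device change that request carries (plain data for illustration, not pyVmomi objects; the field names are indicative only):

# Rough shape of the "attach disk" reconfigure request seen in the log:
# add one virtual disk backed by the per-instance VMDK on datastore1.
reconfigure_spec = {
    'device_change': [{
        'operation': 'add',
        'device': {
            'type': 'VirtualDisk',
            'backing': {
                'file_name': '[datastore1] 3923403b-2e8f-4033-89ee-9a907aff1d49/'
                             '3923403b-2e8f-4033-89ee-9a907aff1d49.vmdk',
                'disk_type': 'sparse',
            },
        },
    }],
}
print(reconfigure_spec)
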
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.539773] env[69328]: DEBUG oslo_concurrency.lockutils [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 741.540059] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 741.540337] env[69328]: DEBUG oslo_concurrency.lockutils [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.540493] env[69328]: DEBUG oslo_concurrency.lockutils [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 741.541245] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 741.541596] env[69328]: DEBUG oslo_vmware.api [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Waiting for the task: (returnval){ [ 741.541596] env[69328]: value = "task-3273104" [ 741.541596] env[69328]: _type = "Task" [ 741.541596] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.541806] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b247805e-5277-4172-bf61-1bab6a0a0ae3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.557793] env[69328]: DEBUG oslo_vmware.api [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': task-3273104, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.558491] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 741.558674] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 741.559409] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4e89d13-ef34-4a4d-865f-d9af0a3e743d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.565858] env[69328]: DEBUG oslo_vmware.api [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Waiting for the task: (returnval){ [ 741.565858] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52913548-c986-19f0-3f78-b7eb1bf55626" [ 741.565858] env[69328]: _type = "Task" [ 741.565858] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.577394] env[69328]: DEBUG oslo_vmware.api [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52913548-c986-19f0-3f78-b7eb1bf55626, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.676772] env[69328]: DEBUG nova.network.neutron [req-007f60fb-c1ff-489a-ae39-e74b3512e4ca req-b6ec3ea6-c444-41d6-a4ad-90ce0f509b1c service nova] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Updated VIF entry in instance network info cache for port bdb8bb73-99e9-4ac7-95a7-50bf7fd362a7. 
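Note: "Updated VIF entry in instance network info cache for port bdb8bb73-..." above corresponds to the event handler refreshing a single port's data inside the cached network_info list rather than rebuilding the whole cache. A minimal standalone sketch of that update-by-port-id step (the helper name is hypothetical):

# Replace the entry for one port id in a cached network_info list.
def update_vif_entry(network_info, refreshed_vif):
    return [refreshed_vif if vif['id'] == refreshed_vif['id'] else vif
            for vif in network_info]

cache = [{'id': 'bdb8bb73-99e9-4ac7-95a7-50bf7fd362a7', 'active': False}]
cache = update_vif_entry(cache, {'id': 'bdb8bb73-99e9-4ac7-95a7-50bf7fd362a7',
                                 'active': True})
print(cache)   # the port entry now reflects the refreshed state
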
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 741.676772] env[69328]: DEBUG nova.network.neutron [req-007f60fb-c1ff-489a-ae39-e74b3512e4ca req-b6ec3ea6-c444-41d6-a4ad-90ce0f509b1c service nova] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Updating instance_info_cache with network_info: [{"id": "bdb8bb73-99e9-4ac7-95a7-50bf7fd362a7", "address": "fa:16:3e:30:7b:7f", "network": {"id": "620f277d-ca49-41da-83a0-afb8c393e26e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1223326239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdc479a290524130b9d17e627a64b65a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbdb8bb73-99", "ovs_interfaceid": "bdb8bb73-99e9-4ac7-95a7-50bf7fd362a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 741.906798] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273103, 'name': CreateVM_Task, 'duration_secs': 0.368703} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.906798] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 741.907124] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.907358] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 741.907608] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 741.907893] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2eac5ddc-4f69-44fb-b8aa-15d845b11fe1 {{(pid=69328) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.913589] env[69328]: DEBUG oslo_vmware.api [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 741.913589] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5220ee10-55f9-9a4b-e3d3-f168d62cd379" [ 741.913589] env[69328]: _type = "Task" [ 741.913589] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.921865] env[69328]: DEBUG oslo_vmware.api [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5220ee10-55f9-9a4b-e3d3-f168d62cd379, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.967456] env[69328]: DEBUG nova.compute.utils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 741.975744] env[69328]: DEBUG nova.compute.manager [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 741.975948] env[69328]: DEBUG nova.network.neutron [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 742.025492] env[69328]: DEBUG nova.policy [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b896180d007146d9b95cb7b9aa75f8e2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a019831c6ff240f3bfbf2c2bff104788', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 742.055450] env[69328]: DEBUG oslo_vmware.api [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': task-3273104, 'name': ReconfigVM_Task, 'duration_secs': 0.37724} completed successfully. 
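Note: "Using /dev/sd instead of None" above comes from the block-device code falling back to a default device prefix when the request did not name one; the next free name is then derived from the devices already attached. A tiny illustrative version of that naming step (not Nova's actual get_next_device_name):

# Pick the next free /dev/sdX name given the names already in use.
import string

def next_device_name(used, prefix='/dev/sd'):
    for letter in string.ascii_lowercase:
        candidate = prefix + letter
        if candidate not in used:
            return candidate
    raise ValueError('no free device names left')

print(next_device_name({'/dev/sda'}))   # /dev/sdb
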
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.056128] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Reconfigured VM instance instance-00000025 to attach disk [datastore1] 3923403b-2e8f-4033-89ee-9a907aff1d49/3923403b-2e8f-4033-89ee-9a907aff1d49.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 742.061295] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9cf844b5-2a0f-431f-a876-da9b02fdcb8f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.074836] env[69328]: DEBUG oslo_vmware.api [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Waiting for the task: (returnval){ [ 742.074836] env[69328]: value = "task-3273105" [ 742.074836] env[69328]: _type = "Task" [ 742.074836] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.084777] env[69328]: DEBUG oslo_vmware.api [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52913548-c986-19f0-3f78-b7eb1bf55626, 'name': SearchDatastore_Task, 'duration_secs': 0.010518} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.086271] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45165856-4e3f-4036-a430-13883f050b59 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.092079] env[69328]: DEBUG oslo_vmware.api [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': task-3273105, 'name': Rename_Task} progress is 10%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.095724] env[69328]: DEBUG oslo_vmware.api [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Waiting for the task: (returnval){ [ 742.095724] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5210a88a-8939-051e-c913-399e0d5c53d5" [ 742.095724] env[69328]: _type = "Task" [ 742.095724] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.104752] env[69328]: DEBUG oslo_vmware.api [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5210a88a-8939-051e-c913-399e0d5c53d5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.183649] env[69328]: DEBUG oslo_concurrency.lockutils [req-007f60fb-c1ff-489a-ae39-e74b3512e4ca req-b6ec3ea6-c444-41d6-a4ad-90ce0f509b1c service nova] Releasing lock "refresh_cache-c465c53f-d96b-461b-b8ff-b19929b4f789" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 742.426751] env[69328]: DEBUG oslo_vmware.api [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5220ee10-55f9-9a4b-e3d3-f168d62cd379, 'name': SearchDatastore_Task, 'duration_secs': 0.009845} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.427108] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 742.427295] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 742.427569] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.427747] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 742.427930] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 742.428216] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f1b218c5-487c-4799-88de-b4d11b21c1d7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.439746] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 742.440655] 
env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 742.440655] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b33392f5-3a10-48fc-80ab-56261da4a0fc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.446946] env[69328]: DEBUG oslo_vmware.api [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 742.446946] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c57276-241c-d42a-30c4-75a3348d570e" [ 742.446946] env[69328]: _type = "Task" [ 742.446946] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.458996] env[69328]: DEBUG oslo_vmware.api [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c57276-241c-d42a-30c4-75a3348d570e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.470676] env[69328]: DEBUG nova.network.neutron [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Successfully created port: b53cf739-4e56-4bd3-85f1-7e758b465dd8 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 742.478099] env[69328]: DEBUG nova.compute.manager [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 742.588805] env[69328]: DEBUG oslo_vmware.api [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': task-3273105, 'name': Rename_Task, 'duration_secs': 0.234901} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.589328] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 742.589378] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c3e8d05f-cb0c-46e9-8483-e4d284db9f31 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.597262] env[69328]: DEBUG oslo_vmware.api [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Waiting for the task: (returnval){ [ 742.597262] env[69328]: value = "task-3273106" [ 742.597262] env[69328]: _type = "Task" [ 742.597262] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.608529] env[69328]: DEBUG oslo_vmware.api [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5210a88a-8939-051e-c913-399e0d5c53d5, 'name': SearchDatastore_Task, 'duration_secs': 0.011739} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.612358] env[69328]: DEBUG oslo_concurrency.lockutils [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 742.612628] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 732342ea-2f73-40ea-a826-883ddc7a385a/732342ea-2f73-40ea-a826-883ddc7a385a.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 742.613472] env[69328]: DEBUG oslo_vmware.api [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': task-3273106, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.614166] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df2764ec-cd44-4b7e-8fbb-b5cb849dc32d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.617606] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-644a023e-39fe-46db-9cb1-3ef5183dac20 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.622822] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Acquiring lock "b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 742.623086] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Lock "b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 742.628047] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9596a34-ead5-43d1-8423-1f3882de6edd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.634662] env[69328]: DEBUG oslo_vmware.api [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Waiting for the task: (returnval){ [ 742.634662] env[69328]: value = "task-3273107" [ 742.634662] env[69328]: _type = "Task" [ 742.634662] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.663628] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c15c7a07-9960-4515-afa2-859d897e0565 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.670072] env[69328]: DEBUG oslo_vmware.api [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': task-3273107, 'name': CopyVirtualDisk_Task} progress is 0%. 
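The "Acquiring lock ... / acquired ... waited 0.000s / released ... held Ns" triples above are the usual instrumented-lock pattern. A plain-Python analogue built on threading.Lock plus timing, illustrative only and not the oslo.concurrency lockutils code:

import contextlib
import threading
import time

_locks = {}

@contextlib.contextmanager
def timed_lock(name):
    # Record how long we waited to acquire and how long we held the lock,
    # mirroring the "waited ...s" / "held ...s" figures in the log.
    lock = _locks.setdefault(name, threading.Lock())
    wait_start = time.monotonic()
    with lock:
        print(f'Lock "{name}" acquired :: waited {time.monotonic() - wait_start:.3f}s')
        hold_start = time.monotonic()
        try:
            yield
        finally:
            print(f'Lock "{name}" released :: held {time.monotonic() - hold_start:.3f}s')

with timed_lock("compute_resources"):
    time.sleep(0.05)
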
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.675813] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66471c73-5112-4c56-baf5-51fb67933888 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.690355] env[69328]: DEBUG nova.compute.provider_tree [None req-d9988052-6a38-48dd-86f7-6ef56deaba6f tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 742.960315] env[69328]: DEBUG oslo_vmware.api [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c57276-241c-d42a-30c4-75a3348d570e, 'name': SearchDatastore_Task, 'duration_secs': 0.009839} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.961498] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d33d858-bb76-47a4-9831-b146f786b53b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.969354] env[69328]: DEBUG oslo_vmware.api [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 742.969354] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f4ea00-50d7-fa71-0c4f-513acfbe2e92" [ 742.969354] env[69328]: _type = "Task" [ 742.969354] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.980157] env[69328]: DEBUG oslo_vmware.api [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f4ea00-50d7-fa71-0c4f-513acfbe2e92, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.111151] env[69328]: DEBUG oslo_vmware.api [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': task-3273106, 'name': PowerOnVM_Task, 'duration_secs': 0.47149} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.111449] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 743.111655] env[69328]: INFO nova.compute.manager [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Took 7.30 seconds to spawn the instance on the hypervisor. [ 743.112728] env[69328]: DEBUG nova.compute.manager [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 743.112728] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bb34300-39fe-451d-b8c9-1760aec46d2b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.147560] env[69328]: DEBUG oslo_vmware.api [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': task-3273107, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.507905} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.147900] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 732342ea-2f73-40ea-a826-883ddc7a385a/732342ea-2f73-40ea-a826-883ddc7a385a.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 743.148197] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 743.148498] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6d057754-d1fb-452d-831c-c4bbc18253a6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.156270] env[69328]: DEBUG oslo_vmware.api [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Waiting for the task: (returnval){ [ 743.156270] env[69328]: value = "task-3273108" [ 743.156270] env[69328]: _type = "Task" [ 743.156270] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.167557] env[69328]: DEBUG oslo_vmware.api [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': task-3273108, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.193477] env[69328]: DEBUG nova.scheduler.client.report [None req-d9988052-6a38-48dd-86f7-6ef56deaba6f tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 743.481157] env[69328]: DEBUG oslo_vmware.api [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f4ea00-50d7-fa71-0c4f-513acfbe2e92, 'name': SearchDatastore_Task, 'duration_secs': 0.020471} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.481443] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 743.481789] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] c465c53f-d96b-461b-b8ff-b19929b4f789/c465c53f-d96b-461b-b8ff-b19929b4f789.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 743.482105] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fdc197ff-dde1-476a-ac7a-1aea3274330f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.486711] env[69328]: DEBUG nova.compute.manager [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Start spawning the instance on the hypervisor. 
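The inventory report above fixes this node's schedulable capacity; with the usual placement-style formula capacity = (total - reserved) * allocation_ratio, the logged figures work out as below. Quick recomputation only, not Placement code:

inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}
for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: {capacity:g}")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
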
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 743.490391] env[69328]: DEBUG oslo_vmware.api [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 743.490391] env[69328]: value = "task-3273109" [ 743.490391] env[69328]: _type = "Task" [ 743.490391] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.498803] env[69328]: DEBUG oslo_vmware.api [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273109, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.513942] env[69328]: DEBUG nova.virt.hardware [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 743.514189] env[69328]: DEBUG nova.virt.hardware [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 743.514347] env[69328]: DEBUG nova.virt.hardware [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 743.514528] env[69328]: DEBUG nova.virt.hardware [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 743.514672] env[69328]: DEBUG nova.virt.hardware [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 743.514819] env[69328]: DEBUG nova.virt.hardware [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 
{{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 743.515035] env[69328]: DEBUG nova.virt.hardware [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 743.515198] env[69328]: DEBUG nova.virt.hardware [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 743.515363] env[69328]: DEBUG nova.virt.hardware [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 743.515522] env[69328]: DEBUG nova.virt.hardware [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 743.515686] env[69328]: DEBUG nova.virt.hardware [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 743.516520] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3787d01f-9ce4-46d3-9eaa-428b61b0037b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.524202] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-843738af-f170-4cd6-980c-064f38932f83 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.630277] env[69328]: INFO nova.compute.manager [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Took 47.60 seconds to build instance. [ 743.669461] env[69328]: DEBUG oslo_vmware.api [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': task-3273108, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072095} completed successfully. 
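The topology derivation above (1 vCPU, limits of 65536 sockets/cores/threads, a single possible topology of 1:1:1) can be reproduced with a brute-force search over factorisations of the vCPU count. Illustrative sketch only, not the nova.virt.hardware algorithm:

def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    # Every (sockets, cores, threads) triple whose product is exactly the
    # requested vCPU count and that stays within the given limits.
    return [
        (s, c, t)
        for s in range(1, min(vcpus, max_sockets) + 1)
        for c in range(1, min(vcpus, max_cores) + 1)
        for t in range(1, min(vcpus, max_threads) + 1)
        if s * c * t == vcpus
    ]

print(possible_topologies(1, 65536, 65536, 65536))   # [(1, 1, 1)]
print(possible_topologies(4, 65536, 65536, 65536))   # includes (1, 4, 1), (2, 2, 1), (4, 1, 1), ...
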
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.669799] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 743.670653] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e58c7f76-fecf-4e5d-8ca8-99f1d94ee7ce {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.692311] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Reconfiguring VM instance instance-00000024 to attach disk [datastore2] 732342ea-2f73-40ea-a826-883ddc7a385a/732342ea-2f73-40ea-a826-883ddc7a385a.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 743.692645] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ba15e27b-37c8-4f27-8ec9-15f85a97b9e8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.708950] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d9988052-6a38-48dd-86f7-6ef56deaba6f tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.248s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 743.712118] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.477s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 743.713341] env[69328]: INFO nova.compute.claims [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 743.723195] env[69328]: DEBUG oslo_vmware.api [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Waiting for the task: (returnval){ [ 743.723195] env[69328]: value = "task-3273110" [ 743.723195] env[69328]: _type = "Task" [ 743.723195] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.733267] env[69328]: DEBUG oslo_vmware.api [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': task-3273110, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.734456] env[69328]: INFO nova.scheduler.client.report [None req-d9988052-6a38-48dd-86f7-6ef56deaba6f tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Deleted allocations for instance a798c3f2-ccde-488e-8a14-21f4a04f8e12 [ 743.913777] env[69328]: DEBUG nova.compute.manager [req-ddb2c059-7db7-4504-990e-276f24e5d1d8 req-54bf3ace-0f4a-4aa4-98a4-3fe3594f1a1f service nova] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Received event network-vif-plugged-b53cf739-4e56-4bd3-85f1-7e758b465dd8 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 743.914012] env[69328]: DEBUG oslo_concurrency.lockutils [req-ddb2c059-7db7-4504-990e-276f24e5d1d8 req-54bf3ace-0f4a-4aa4-98a4-3fe3594f1a1f service nova] Acquiring lock "5292b759-9d1f-486a-b4d6-90519b3ae986-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 743.914252] env[69328]: DEBUG oslo_concurrency.lockutils [req-ddb2c059-7db7-4504-990e-276f24e5d1d8 req-54bf3ace-0f4a-4aa4-98a4-3fe3594f1a1f service nova] Lock "5292b759-9d1f-486a-b4d6-90519b3ae986-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 743.914423] env[69328]: DEBUG oslo_concurrency.lockutils [req-ddb2c059-7db7-4504-990e-276f24e5d1d8 req-54bf3ace-0f4a-4aa4-98a4-3fe3594f1a1f service nova] Lock "5292b759-9d1f-486a-b4d6-90519b3ae986-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 743.914590] env[69328]: DEBUG nova.compute.manager [req-ddb2c059-7db7-4504-990e-276f24e5d1d8 req-54bf3ace-0f4a-4aa4-98a4-3fe3594f1a1f service nova] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] No waiting events found dispatching network-vif-plugged-b53cf739-4e56-4bd3-85f1-7e758b465dd8 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 743.914799] env[69328]: WARNING nova.compute.manager [req-ddb2c059-7db7-4504-990e-276f24e5d1d8 req-54bf3ace-0f4a-4aa4-98a4-3fe3594f1a1f service nova] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Received unexpected event network-vif-plugged-b53cf739-4e56-4bd3-85f1-7e758b465dd8 for instance with vm_state building and task_state spawning. [ 744.000161] env[69328]: DEBUG oslo_vmware.api [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273109, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.453286} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.000435] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] c465c53f-d96b-461b-b8ff-b19929b4f789/c465c53f-d96b-461b-b8ff-b19929b4f789.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 744.000646] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 744.000930] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ffe8a01b-9e54-4bd9-8e2a-7371903065c5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.009570] env[69328]: DEBUG oslo_vmware.api [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 744.009570] env[69328]: value = "task-3273111" [ 744.009570] env[69328]: _type = "Task" [ 744.009570] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.018260] env[69328]: DEBUG oslo_vmware.api [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273111, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.058725] env[69328]: DEBUG nova.network.neutron [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Successfully updated port: b53cf739-4e56-4bd3-85f1-7e758b465dd8 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 744.132145] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a447071-b6fc-4af0-8ce5-82e79c290b4f tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Lock "3923403b-2e8f-4033-89ee-9a907aff1d49" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 90.084s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 744.235030] env[69328]: DEBUG oslo_vmware.api [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': task-3273110, 'name': ReconfigVM_Task} progress is 14%. 
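The 1048576 in the "Extending root virtual disk" entries above is the flavor's 1 GiB root disk (root_gb=1 in the m1.nano flavor logged earlier) expressed in KiB, the unit the vSphere extend call works in:

root_gb = 1                               # m1.nano root_gb from the flavor above
new_capacity_kib = root_gb * 1024 * 1024  # GiB -> KiB
assert new_capacity_kib == 1048576
print(new_capacity_kib)
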
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.242198] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d9988052-6a38-48dd-86f7-6ef56deaba6f tempest-VolumesAssistedSnapshotsTest-1188464349 tempest-VolumesAssistedSnapshotsTest-1188464349-project-member] Lock "a798c3f2-ccde-488e-8a14-21f4a04f8e12" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.873s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 744.523433] env[69328]: DEBUG oslo_vmware.api [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273111, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077955} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.523733] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 744.524573] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-336c5198-606d-4229-b5a9-18468647e4e6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.546537] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] c465c53f-d96b-461b-b8ff-b19929b4f789/c465c53f-d96b-461b-b8ff-b19929b4f789.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 744.546895] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c47aa25b-483e-477e-92ee-05fec5065288 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.560934] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Acquiring lock "refresh_cache-5292b759-9d1f-486a-b4d6-90519b3ae986" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.561114] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Acquired lock "refresh_cache-5292b759-9d1f-486a-b4d6-90519b3ae986" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 744.561217] env[69328]: DEBUG nova.network.neutron [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Building network info cache for instance {{(pid=69328) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 744.568275] env[69328]: DEBUG oslo_vmware.api [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 744.568275] env[69328]: value = "task-3273112" [ 744.568275] env[69328]: _type = "Task" [ 744.568275] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.577241] env[69328]: DEBUG oslo_vmware.api [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273112, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.635985] env[69328]: DEBUG nova.compute.manager [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 744.733855] env[69328]: DEBUG oslo_vmware.api [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': task-3273110, 'name': ReconfigVM_Task, 'duration_secs': 0.763839} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.734207] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Reconfigured VM instance instance-00000024 to attach disk [datastore2] 732342ea-2f73-40ea-a826-883ddc7a385a/732342ea-2f73-40ea-a826-883ddc7a385a.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 744.734839] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c2b7254c-0d35-4d12-a011-c00fa431daea {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.743464] env[69328]: DEBUG oslo_vmware.api [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Waiting for the task: (returnval){ [ 744.743464] env[69328]: value = "task-3273113" [ 744.743464] env[69328]: _type = "Task" [ 744.743464] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.762977] env[69328]: DEBUG oslo_vmware.api [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': task-3273113, 'name': Rename_Task} progress is 10%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.032025] env[69328]: DEBUG nova.compute.manager [req-a03a74f7-d9c7-4a36-bd8b-c7b4048cb24a req-47b971da-1517-4d0b-98c7-252648170e35 service nova] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Received event network-changed-efc73e76-7767-42a3-b5a4-3891364b487f {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 745.032371] env[69328]: DEBUG nova.compute.manager [req-a03a74f7-d9c7-4a36-bd8b-c7b4048cb24a req-47b971da-1517-4d0b-98c7-252648170e35 service nova] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Refreshing instance network info cache due to event network-changed-efc73e76-7767-42a3-b5a4-3891364b487f. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 745.032698] env[69328]: DEBUG oslo_concurrency.lockutils [req-a03a74f7-d9c7-4a36-bd8b-c7b4048cb24a req-47b971da-1517-4d0b-98c7-252648170e35 service nova] Acquiring lock "refresh_cache-e92953f4-b634-4ef9-a5ad-63a886cfa007" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.032935] env[69328]: DEBUG oslo_concurrency.lockutils [req-a03a74f7-d9c7-4a36-bd8b-c7b4048cb24a req-47b971da-1517-4d0b-98c7-252648170e35 service nova] Acquired lock "refresh_cache-e92953f4-b634-4ef9-a5ad-63a886cfa007" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 745.033197] env[69328]: DEBUG nova.network.neutron [req-a03a74f7-d9c7-4a36-bd8b-c7b4048cb24a req-47b971da-1517-4d0b-98c7-252648170e35 service nova] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Refreshing network info cache for port efc73e76-7767-42a3-b5a4-3891364b487f {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 745.078541] env[69328]: DEBUG oslo_vmware.api [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273112, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.129367] env[69328]: DEBUG nova.network.neutron [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 745.161845] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 745.257721] env[69328]: DEBUG oslo_vmware.api [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': task-3273113, 'name': Rename_Task, 'duration_secs': 0.140945} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.258012] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 745.258267] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-25b0c979-c41f-464a-9d25-1889f248c34e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.266382] env[69328]: DEBUG oslo_vmware.api [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Waiting for the task: (returnval){ [ 745.266382] env[69328]: value = "task-3273114" [ 745.266382] env[69328]: _type = "Task" [ 745.266382] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.281531] env[69328]: DEBUG oslo_vmware.api [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': task-3273114, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.300972] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95ac650f-962b-42ed-b6cb-11288c7a0d2a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.308906] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99b976ca-7b5c-4d15-b954-76e79c98a5fa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.344152] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bce9dc8-9a9a-4200-b1b9-d5ca6979bc20 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.353166] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4909b69a-3cd5-4b21-abba-289e1ced711b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.367851] env[69328]: DEBUG nova.compute.provider_tree [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 745.580437] env[69328]: DEBUG oslo_vmware.api [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273112, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.706449] env[69328]: DEBUG nova.network.neutron [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Updating instance_info_cache with network_info: [{"id": "b53cf739-4e56-4bd3-85f1-7e758b465dd8", "address": "fa:16:3e:3a:5e:b5", "network": {"id": "b18f73ad-2ff8-4679-853a-f27ced0e60fd", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-906269769-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a019831c6ff240f3bfbf2c2bff104788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dcf5c3f7-4e33-4f21-b323-3673930b789c", "external-id": "nsx-vlan-transportzone-983", "segmentation_id": 983, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb53cf739-4e", "ovs_interfaceid": "b53cf739-4e56-4bd3-85f1-7e758b465dd8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 745.777271] env[69328]: DEBUG oslo_vmware.api [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': task-3273114, 'name': PowerOnVM_Task, 'duration_secs': 0.438662} completed successfully. 
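The instance_info_cache entry above is plain JSON, so the useful fields (MAC address, fixed IPs, OVS interface) are easy to pull out. A trimmed copy of the logged data, illustrative only:

import json

network_info = json.loads("""
[{"id": "b53cf739-4e56-4bd3-85f1-7e758b465dd8",
  "address": "fa:16:3e:3a:5e:b5",
  "network": {"label": "tempest-ListServersNegativeTestJSON-906269769-network",
              "subnets": [{"cidr": "192.168.128.0/28",
                           "ips": [{"address": "192.168.128.4"}]}]},
  "devname": "tapb53cf739-4e",
  "ovs_interfaceid": "b53cf739-4e56-4bd3-85f1-7e758b465dd8"}]
""")
vif = network_info[0]
fixed_ips = [ip["address"]
             for subnet in vif["network"]["subnets"]
             for ip in subnet["ips"]]
print(vif["address"], fixed_ips, vif["devname"])
# fa:16:3e:3a:5e:b5 ['192.168.128.4'] tapb53cf739-4e
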
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.777557] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 745.778151] env[69328]: DEBUG nova.compute.manager [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 745.779038] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a3e563e-673a-45b1-b9a6-9ea554220cba {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.871171] env[69328]: DEBUG nova.scheduler.client.report [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 745.917249] env[69328]: DEBUG nova.network.neutron [req-a03a74f7-d9c7-4a36-bd8b-c7b4048cb24a req-47b971da-1517-4d0b-98c7-252648170e35 service nova] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Updated VIF entry in instance network info cache for port efc73e76-7767-42a3-b5a4-3891364b487f. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 745.917643] env[69328]: DEBUG nova.network.neutron [req-a03a74f7-d9c7-4a36-bd8b-c7b4048cb24a req-47b971da-1517-4d0b-98c7-252648170e35 service nova] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Updating instance_info_cache with network_info: [{"id": "efc73e76-7767-42a3-b5a4-3891364b487f", "address": "fa:16:3e:0d:8d:73", "network": {"id": "2060ab72-61b3-4aea-bcdd-0b8a76a11fc7", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1995944610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.195", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65edf1e9f4344038878d05021bbdef78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefc73e76-77", "ovs_interfaceid": "efc73e76-7767-42a3-b5a4-3891364b487f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.079946] env[69328]: DEBUG oslo_vmware.api [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273112, 'name': ReconfigVM_Task, 'duration_secs': 1.075627} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.080484] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Reconfigured VM instance instance-00000026 to attach disk [datastore1] c465c53f-d96b-461b-b8ff-b19929b4f789/c465c53f-d96b-461b-b8ff-b19929b4f789.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 746.081431] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e948d163-9bd9-48d9-81a6-aa75cd868493 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.089335] env[69328]: DEBUG oslo_vmware.api [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 746.089335] env[69328]: value = "task-3273115" [ 746.089335] env[69328]: _type = "Task" [ 746.089335] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.097679] env[69328]: DEBUG oslo_vmware.api [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273115, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.209087] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Releasing lock "refresh_cache-5292b759-9d1f-486a-b4d6-90519b3ae986" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 746.209527] env[69328]: DEBUG nova.compute.manager [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Instance network_info: |[{"id": "b53cf739-4e56-4bd3-85f1-7e758b465dd8", "address": "fa:16:3e:3a:5e:b5", "network": {"id": "b18f73ad-2ff8-4679-853a-f27ced0e60fd", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-906269769-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a019831c6ff240f3bfbf2c2bff104788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dcf5c3f7-4e33-4f21-b323-3673930b789c", "external-id": "nsx-vlan-transportzone-983", "segmentation_id": 983, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb53cf739-4e", "ovs_interfaceid": "b53cf739-4e56-4bd3-85f1-7e758b465dd8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 746.210011] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3a:5e:b5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dcf5c3f7-4e33-4f21-b323-3673930b789c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b53cf739-4e56-4bd3-85f1-7e758b465dd8', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 746.217825] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Creating folder: Project (a019831c6ff240f3bfbf2c2bff104788). Parent ref: group-v653649. 
{{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 746.218145] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aeb3c113-9bb6-420d-8490-6f05cb245808 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.231312] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Created folder: Project (a019831c6ff240f3bfbf2c2bff104788) in parent group-v653649. [ 746.231506] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Creating folder: Instances. Parent ref: group-v653771. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 746.231757] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-74baa867-d6fc-415f-a6c1-e2f20d79e4f8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.242420] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Created folder: Instances in parent group-v653771. [ 746.242686] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 746.242885] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 746.243104] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9c6276de-9abc-4cf1-954a-d13e02498217 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.263535] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 746.263535] env[69328]: value = "task-3273118" [ 746.263535] env[69328]: _type = "Task" [ 746.263535] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.273718] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273118, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.306860] env[69328]: DEBUG oslo_concurrency.lockutils [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 746.377486] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.666s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 746.378409] env[69328]: DEBUG nova.compute.manager [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 746.381702] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 34.805s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 746.409563] env[69328]: DEBUG nova.compute.manager [req-b120e883-28e2-42b6-8126-e3d9c5c56b87 req-b16ecc1a-2abe-4b75-9dae-a192bbd65372 service nova] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Received event network-changed-b53cf739-4e56-4bd3-85f1-7e758b465dd8 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 746.409749] env[69328]: DEBUG nova.compute.manager [req-b120e883-28e2-42b6-8126-e3d9c5c56b87 req-b16ecc1a-2abe-4b75-9dae-a192bbd65372 service nova] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Refreshing instance network info cache due to event network-changed-b53cf739-4e56-4bd3-85f1-7e758b465dd8. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 746.410207] env[69328]: DEBUG oslo_concurrency.lockutils [req-b120e883-28e2-42b6-8126-e3d9c5c56b87 req-b16ecc1a-2abe-4b75-9dae-a192bbd65372 service nova] Acquiring lock "refresh_cache-5292b759-9d1f-486a-b4d6-90519b3ae986" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.410207] env[69328]: DEBUG oslo_concurrency.lockutils [req-b120e883-28e2-42b6-8126-e3d9c5c56b87 req-b16ecc1a-2abe-4b75-9dae-a192bbd65372 service nova] Acquired lock "refresh_cache-5292b759-9d1f-486a-b4d6-90519b3ae986" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 746.410300] env[69328]: DEBUG nova.network.neutron [req-b120e883-28e2-42b6-8126-e3d9c5c56b87 req-b16ecc1a-2abe-4b75-9dae-a192bbd65372 service nova] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Refreshing network info cache for port b53cf739-4e56-4bd3-85f1-7e758b465dd8 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 746.421700] env[69328]: DEBUG oslo_concurrency.lockutils [req-a03a74f7-d9c7-4a36-bd8b-c7b4048cb24a req-47b971da-1517-4d0b-98c7-252648170e35 service nova] Releasing lock "refresh_cache-e92953f4-b634-4ef9-a5ad-63a886cfa007" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 746.602090] env[69328]: DEBUG oslo_vmware.api [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273115, 'name': Rename_Task, 'duration_secs': 0.175343} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.603170] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 746.603648] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-40f16c82-7526-4873-a232-ae7239fedc56 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.611306] env[69328]: DEBUG oslo_vmware.api [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 746.611306] env[69328]: value = "task-3273119" [ 746.611306] env[69328]: _type = "Task" [ 746.611306] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.623686] env[69328]: DEBUG oslo_vmware.api [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273119, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.779110] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273118, 'name': CreateVM_Task, 'duration_secs': 0.399355} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.780183] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 746.780941] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.781150] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 746.781497] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 746.781776] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67dc76aa-d520-4dd7-8e18-fbd9d8dcfe4f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.788432] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Waiting for the task: (returnval){ [ 746.788432] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]523e54b7-d5cd-551b-7bd7-afb4cad242a8" [ 746.788432] env[69328]: _type = "Task" [ 746.788432] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.797730] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]523e54b7-d5cd-551b-7bd7-afb4cad242a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.897878] env[69328]: DEBUG nova.compute.utils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 746.899482] env[69328]: DEBUG nova.compute.manager [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 746.899836] env[69328]: DEBUG nova.network.neutron [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 746.996492] env[69328]: DEBUG nova.policy [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b896180d007146d9b95cb7b9aa75f8e2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a019831c6ff240f3bfbf2c2bff104788', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 747.124828] env[69328]: DEBUG oslo_vmware.api [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273119, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.299347] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]523e54b7-d5cd-551b-7bd7-afb4cad242a8, 'name': SearchDatastore_Task, 'duration_secs': 0.01951} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.299662] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 747.300737] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 747.301094] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.301262] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 747.301591] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 747.301912] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ad35c1c2-5a89-4197-84cb-40aa7c0f590b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.313805] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 747.314334] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 747.315362] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ddb7016-d6ee-4343-971a-dd3751e238c4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.321791] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Waiting for the task: (returnval){ [ 747.321791] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52533cda-67e2-ca25-6d02-a17ba382c585" [ 747.321791] env[69328]: _type = "Task" [ 747.321791] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.331599] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52533cda-67e2-ca25-6d02-a17ba382c585, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.404745] env[69328]: DEBUG nova.compute.manager [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 747.443909] env[69328]: WARNING nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 676173ee-8001-48c6-bd28-09130f6dd99a is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 747.444157] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 747.444375] env[69328]: WARNING nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 747.444556] env[69328]: WARNING nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 46526210-2783-408d-9ecb-773f33ff0c66 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 747.444750] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance e92953f4-b634-4ef9-a5ad-63a886cfa007 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 747.445145] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 15a8de08-4d20-4329-9867-53e5dff82878 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 747.445564] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 747.445564] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 747.445564] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance bc9c3a41-7264-4d69-bc15-397b5fa0a8ad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 747.445715] env[69328]: WARNING nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance bbbfb48d-b474-4a6e-9078-336f23d2c343 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 747.445771] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance b0a1441c-81e2-4131-a2ff-f5042d559d9f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 747.445888] env[69328]: WARNING nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 747.446014] env[69328]: WARNING nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 8e3a73c1-b622-47f4-99af-71b6dba7c09b is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
Skipping heal of allocation because we do not know what to do. [ 747.446133] env[69328]: WARNING nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance e1eec0ce-8df7-402a-b628-5dfdc11949e7 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 747.446247] env[69328]: WARNING nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 1e7e9e6e-c084-480c-8653-8441c13d7514 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 747.446361] env[69328]: WARNING nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 146a3eef-0971-4f6e-bd24-58b38a1de0ed is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 747.446472] env[69328]: WARNING nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 4c54c0dd-32f1-4d35-b770-3e1a540c54a7 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 747.446593] env[69328]: WARNING nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance e5d3df12-5334-44c8-9a44-1674e57918bb is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 747.446708] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 732342ea-2f73-40ea-a826-883ddc7a385a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 747.446858] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 3923403b-2e8f-4033-89ee-9a907aff1d49 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 747.446918] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance c465c53f-d96b-461b-b8ff-b19929b4f789 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 747.447041] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 5292b759-9d1f-486a-b4d6-90519b3ae986 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 747.447150] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance afa25f89-ccda-4b77-aaa1-a3b62b53d870 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 747.453037] env[69328]: DEBUG nova.network.neutron [req-b120e883-28e2-42b6-8126-e3d9c5c56b87 req-b16ecc1a-2abe-4b75-9dae-a192bbd65372 service nova] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Updated VIF entry in instance network info cache for port b53cf739-4e56-4bd3-85f1-7e758b465dd8. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 747.453037] env[69328]: DEBUG nova.network.neutron [req-b120e883-28e2-42b6-8126-e3d9c5c56b87 req-b16ecc1a-2abe-4b75-9dae-a192bbd65372 service nova] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Updating instance_info_cache with network_info: [{"id": "b53cf739-4e56-4bd3-85f1-7e758b465dd8", "address": "fa:16:3e:3a:5e:b5", "network": {"id": "b18f73ad-2ff8-4679-853a-f27ced0e60fd", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-906269769-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a019831c6ff240f3bfbf2c2bff104788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dcf5c3f7-4e33-4f21-b323-3673930b789c", "external-id": "nsx-vlan-transportzone-983", "segmentation_id": 983, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb53cf739-4e", "ovs_interfaceid": "b53cf739-4e56-4bd3-85f1-7e758b465dd8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.628958] env[69328]: DEBUG oslo_vmware.api [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273119, 'name': PowerOnVM_Task, 'duration_secs': 1.000465} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.629351] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 747.629588] env[69328]: INFO nova.compute.manager [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Took 9.09 seconds to spawn the instance on the hypervisor. 
[ 747.629775] env[69328]: DEBUG nova.compute.manager [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 747.630646] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec3ef7fb-660f-41f2-be91-cd89fb11dd85 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.723527] env[69328]: DEBUG nova.network.neutron [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Successfully created port: b4eaba7b-0335-4344-94ab-94e9d7a355a2 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 747.836020] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52533cda-67e2-ca25-6d02-a17ba382c585, 'name': SearchDatastore_Task, 'duration_secs': 0.018162} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.836020] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5706398a-e1b2-4378-b2ed-1d91ea37927c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.840949] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Waiting for the task: (returnval){ [ 747.840949] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52bd3943-fc23-af43-3316-cb36e281291b" [ 747.840949] env[69328]: _type = "Task" [ 747.840949] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.850514] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52bd3943-fc23-af43-3316-cb36e281291b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.952046] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 690096cf-a0bd-4db1-ad97-8d8a37ad7c84 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 747.958366] env[69328]: DEBUG oslo_concurrency.lockutils [req-b120e883-28e2-42b6-8126-e3d9c5c56b87 req-b16ecc1a-2abe-4b75-9dae-a192bbd65372 service nova] Releasing lock "refresh_cache-5292b759-9d1f-486a-b4d6-90519b3ae986" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 748.148710] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fd9effd8-506c-4bc3-8d83-942135c74d2b tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Acquiring lock "732342ea-2f73-40ea-a826-883ddc7a385a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 748.149139] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fd9effd8-506c-4bc3-8d83-942135c74d2b tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Lock "732342ea-2f73-40ea-a826-883ddc7a385a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 748.149351] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fd9effd8-506c-4bc3-8d83-942135c74d2b tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Acquiring lock "732342ea-2f73-40ea-a826-883ddc7a385a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 748.149857] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fd9effd8-506c-4bc3-8d83-942135c74d2b tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Lock "732342ea-2f73-40ea-a826-883ddc7a385a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 748.149857] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fd9effd8-506c-4bc3-8d83-942135c74d2b tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Lock "732342ea-2f73-40ea-a826-883ddc7a385a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 748.158781] env[69328]: INFO nova.compute.manager [None req-fd9effd8-506c-4bc3-8d83-942135c74d2b tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Terminating instance [ 748.189560] env[69328]: INFO nova.compute.manager [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Took 46.97 seconds to build instance. 
[ 748.354030] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52bd3943-fc23-af43-3316-cb36e281291b, 'name': SearchDatastore_Task, 'duration_secs': 0.018887} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.354342] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 748.357022] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 5292b759-9d1f-486a-b4d6-90519b3ae986/5292b759-9d1f-486a-b4d6-90519b3ae986.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 748.357022] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5dc2d0b2-1e16-47da-8990-5478e1901740 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.363959] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Waiting for the task: (returnval){ [ 748.363959] env[69328]: value = "task-3273120" [ 748.363959] env[69328]: _type = "Task" [ 748.363959] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.375406] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273120, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.416371] env[69328]: DEBUG nova.compute.manager [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 748.443897] env[69328]: DEBUG nova.virt.hardware [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 748.444272] env[69328]: DEBUG nova.virt.hardware [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 748.444882] env[69328]: DEBUG nova.virt.hardware [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 748.444882] env[69328]: DEBUG nova.virt.hardware [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 748.445049] env[69328]: DEBUG nova.virt.hardware [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 748.445212] env[69328]: DEBUG nova.virt.hardware [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 748.445425] env[69328]: DEBUG nova.virt.hardware [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 748.445597] env[69328]: DEBUG nova.virt.hardware [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 748.445766] env[69328]: DEBUG nova.virt.hardware [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 748.445928] env[69328]: DEBUG nova.virt.hardware [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 748.446200] env[69328]: DEBUG nova.virt.hardware [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 748.447175] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab06a98b-faae-469b-9e39-48bd3a1c58d6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.457142] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d039734-db1c-45f8-b874-8e5448eb16f9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.462064] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 99e31dfd-5d41-4564-886f-becc25ca289c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 748.673779] env[69328]: DEBUG nova.compute.manager [req-fa0dc1d2-c2fc-421b-abe6-07441b01f6ab req-45f0f569-7e72-4c4a-9a56-79647e1bd40b service nova] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Received event network-changed-efc73e76-7767-42a3-b5a4-3891364b487f {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 748.675341] env[69328]: DEBUG nova.compute.manager [req-fa0dc1d2-c2fc-421b-abe6-07441b01f6ab req-45f0f569-7e72-4c4a-9a56-79647e1bd40b service nova] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Refreshing instance network info cache due to event network-changed-efc73e76-7767-42a3-b5a4-3891364b487f. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 748.675936] env[69328]: DEBUG oslo_concurrency.lockutils [req-fa0dc1d2-c2fc-421b-abe6-07441b01f6ab req-45f0f569-7e72-4c4a-9a56-79647e1bd40b service nova] Acquiring lock "refresh_cache-e92953f4-b634-4ef9-a5ad-63a886cfa007" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.676203] env[69328]: DEBUG oslo_concurrency.lockutils [req-fa0dc1d2-c2fc-421b-abe6-07441b01f6ab req-45f0f569-7e72-4c4a-9a56-79647e1bd40b service nova] Acquired lock "refresh_cache-e92953f4-b634-4ef9-a5ad-63a886cfa007" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 748.676455] env[69328]: DEBUG nova.network.neutron [req-fa0dc1d2-c2fc-421b-abe6-07441b01f6ab req-45f0f569-7e72-4c4a-9a56-79647e1bd40b service nova] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Refreshing network info cache for port efc73e76-7767-42a3-b5a4-3891364b487f {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 748.682290] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fd9effd8-506c-4bc3-8d83-942135c74d2b tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Acquiring lock "refresh_cache-732342ea-2f73-40ea-a826-883ddc7a385a" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.682521] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fd9effd8-506c-4bc3-8d83-942135c74d2b tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Acquired lock "refresh_cache-732342ea-2f73-40ea-a826-883ddc7a385a" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 748.682697] env[69328]: DEBUG nova.network.neutron [None req-fd9effd8-506c-4bc3-8d83-942135c74d2b tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 748.691827] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2b28bca3-1798-41c2-a5d5-ea0ba8360354 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "c465c53f-d96b-461b-b8ff-b19929b4f789" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 93.387s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 748.881395] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273120, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.966166] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance d724a141-35e7-4483-99aa-8a17066fb63b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 749.195348] env[69328]: DEBUG nova.compute.manager [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 749.218150] env[69328]: DEBUG nova.network.neutron [None req-fd9effd8-506c-4bc3-8d83-942135c74d2b tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 749.300290] env[69328]: DEBUG nova.network.neutron [None req-fd9effd8-506c-4bc3-8d83-942135c74d2b tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.377601] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273120, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.538363} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.377601] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 5292b759-9d1f-486a-b4d6-90519b3ae986/5292b759-9d1f-486a-b4d6-90519b3ae986.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 749.377601] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 749.377601] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b61290dc-a572-4ead-ad0b-6bad563d2a3d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.386570] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Waiting for the task: (returnval){ [ 749.386570] env[69328]: value = "task-3273121" [ 749.386570] env[69328]: _type = "Task" [ 749.386570] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.404394] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273121, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.469691] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 18022645-9a2a-489e-b0b1-486165f46f14 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 749.570528] env[69328]: DEBUG nova.network.neutron [req-fa0dc1d2-c2fc-421b-abe6-07441b01f6ab req-45f0f569-7e72-4c4a-9a56-79647e1bd40b service nova] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Updated VIF entry in instance network info cache for port efc73e76-7767-42a3-b5a4-3891364b487f. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 749.570528] env[69328]: DEBUG nova.network.neutron [req-fa0dc1d2-c2fc-421b-abe6-07441b01f6ab req-45f0f569-7e72-4c4a-9a56-79647e1bd40b service nova] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Updating instance_info_cache with network_info: [{"id": "efc73e76-7767-42a3-b5a4-3891364b487f", "address": "fa:16:3e:0d:8d:73", "network": {"id": "2060ab72-61b3-4aea-bcdd-0b8a76a11fc7", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1995944610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65edf1e9f4344038878d05021bbdef78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefc73e76-77", "ovs_interfaceid": "efc73e76-7767-42a3-b5a4-3891364b487f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.630296] env[69328]: DEBUG nova.network.neutron [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Successfully updated port: b4eaba7b-0335-4344-94ab-94e9d7a355a2 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 749.672019] env[69328]: DEBUG nova.compute.manager [req-4b348bc2-684c-479d-aa1b-419e587f5f89 req-83766846-6bbd-4bd7-acdf-c7cb75dc5c5f service nova] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Received event network-changed-1b80d882-8edf-4d06-a91a-6568cce981e8 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 749.672019] env[69328]: DEBUG nova.compute.manager [req-4b348bc2-684c-479d-aa1b-419e587f5f89 req-83766846-6bbd-4bd7-acdf-c7cb75dc5c5f service nova] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Refreshing instance network info cache due to event network-changed-1b80d882-8edf-4d06-a91a-6568cce981e8. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 749.672019] env[69328]: DEBUG oslo_concurrency.lockutils [req-4b348bc2-684c-479d-aa1b-419e587f5f89 req-83766846-6bbd-4bd7-acdf-c7cb75dc5c5f service nova] Acquiring lock "refresh_cache-3923403b-2e8f-4033-89ee-9a907aff1d49" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.672019] env[69328]: DEBUG oslo_concurrency.lockutils [req-4b348bc2-684c-479d-aa1b-419e587f5f89 req-83766846-6bbd-4bd7-acdf-c7cb75dc5c5f service nova] Acquired lock "refresh_cache-3923403b-2e8f-4033-89ee-9a907aff1d49" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 749.672608] env[69328]: DEBUG nova.network.neutron [req-4b348bc2-684c-479d-aa1b-419e587f5f89 req-83766846-6bbd-4bd7-acdf-c7cb75dc5c5f service nova] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Refreshing network info cache for port 1b80d882-8edf-4d06-a91a-6568cce981e8 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 749.721341] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 749.804051] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fd9effd8-506c-4bc3-8d83-942135c74d2b tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Releasing lock "refresh_cache-732342ea-2f73-40ea-a826-883ddc7a385a" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 749.804588] env[69328]: DEBUG nova.compute.manager [None req-fd9effd8-506c-4bc3-8d83-942135c74d2b tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 749.804788] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-fd9effd8-506c-4bc3-8d83-942135c74d2b tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 749.809017] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ecf308d-5db0-4df6-a2f4-faa8149eb94d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.816382] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd9effd8-506c-4bc3-8d83-942135c74d2b tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 749.816641] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-651cafbc-cb78-45ca-9503-f63e8214d9e7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.827876] env[69328]: DEBUG oslo_vmware.api [None req-fd9effd8-506c-4bc3-8d83-942135c74d2b tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Waiting for the task: (returnval){ [ 749.827876] env[69328]: value = "task-3273122" [ 749.827876] env[69328]: _type = "Task" [ 749.827876] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.839821] env[69328]: DEBUG oslo_vmware.api [None req-fd9effd8-506c-4bc3-8d83-942135c74d2b tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': task-3273122, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.898508] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273121, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073686} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.898897] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 749.901338] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0323504-ef60-456f-b799-df788224bd3c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.923410] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Reconfiguring VM instance instance-00000027 to attach disk [datastore2] 5292b759-9d1f-486a-b4d6-90519b3ae986/5292b759-9d1f-486a-b4d6-90519b3ae986.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 749.923730] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-80af830f-36b4-4ff8-bf96-70f7bbfa6208 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.945948] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Waiting for the task: (returnval){ [ 749.945948] env[69328]: value = "task-3273123" [ 749.945948] env[69328]: _type = "Task" [ 749.945948] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.959406] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273123, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.971826] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 749.975575] env[69328]: DEBUG oslo_concurrency.lockutils [None req-da0f032d-986d-4ac6-b9b1-dd7f3fc00d76 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "c465c53f-d96b-461b-b8ff-b19929b4f789" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 749.975575] env[69328]: DEBUG oslo_concurrency.lockutils [None req-da0f032d-986d-4ac6-b9b1-dd7f3fc00d76 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "c465c53f-d96b-461b-b8ff-b19929b4f789" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 750.072459] env[69328]: DEBUG oslo_concurrency.lockutils [req-fa0dc1d2-c2fc-421b-abe6-07441b01f6ab req-45f0f569-7e72-4c4a-9a56-79647e1bd40b service nova] Releasing lock "refresh_cache-e92953f4-b634-4ef9-a5ad-63a886cfa007" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 750.072663] env[69328]: DEBUG nova.compute.manager [req-fa0dc1d2-c2fc-421b-abe6-07441b01f6ab req-45f0f569-7e72-4c4a-9a56-79647e1bd40b service nova] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Received event network-changed-1b80d882-8edf-4d06-a91a-6568cce981e8 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 750.072866] env[69328]: DEBUG nova.compute.manager [req-fa0dc1d2-c2fc-421b-abe6-07441b01f6ab req-45f0f569-7e72-4c4a-9a56-79647e1bd40b service nova] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Refreshing instance network info cache due to event network-changed-1b80d882-8edf-4d06-a91a-6568cce981e8. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 750.074105] env[69328]: DEBUG oslo_concurrency.lockutils [req-fa0dc1d2-c2fc-421b-abe6-07441b01f6ab req-45f0f569-7e72-4c4a-9a56-79647e1bd40b service nova] Acquiring lock "refresh_cache-3923403b-2e8f-4033-89ee-9a907aff1d49" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.131841] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Acquiring lock "refresh_cache-afa25f89-ccda-4b77-aaa1-a3b62b53d870" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.132116] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Acquired lock "refresh_cache-afa25f89-ccda-4b77-aaa1-a3b62b53d870" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 750.132362] env[69328]: DEBUG nova.network.neutron [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 750.337788] env[69328]: DEBUG oslo_vmware.api [None req-fd9effd8-506c-4bc3-8d83-942135c74d2b tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': task-3273122, 'name': PowerOffVM_Task, 'duration_secs': 0.173782} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.338380] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd9effd8-506c-4bc3-8d83-942135c74d2b tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 750.338626] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-fd9effd8-506c-4bc3-8d83-942135c74d2b tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 750.338891] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1dffd0ff-252f-408e-bea2-9db8b13dd27e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.362818] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Acquiring lock "3923403b-2e8f-4033-89ee-9a907aff1d49" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 750.363122] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Lock "3923403b-2e8f-4033-89ee-9a907aff1d49" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 750.363461] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Acquiring lock "3923403b-2e8f-4033-89ee-9a907aff1d49-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 750.363683] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Lock "3923403b-2e8f-4033-89ee-9a907aff1d49-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 750.363859] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Lock "3923403b-2e8f-4033-89ee-9a907aff1d49-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 750.366112] env[69328]: INFO nova.compute.manager [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 
tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Terminating instance [ 750.369070] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-fd9effd8-506c-4bc3-8d83-942135c74d2b tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 750.369286] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-fd9effd8-506c-4bc3-8d83-942135c74d2b tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 750.369465] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd9effd8-506c-4bc3-8d83-942135c74d2b tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Deleting the datastore file [datastore2] 732342ea-2f73-40ea-a826-883ddc7a385a {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 750.370678] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d0b629e2-a6ca-4d46-ab35-4fc62137bbba {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.385962] env[69328]: DEBUG oslo_vmware.api [None req-fd9effd8-506c-4bc3-8d83-942135c74d2b tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Waiting for the task: (returnval){ [ 750.385962] env[69328]: value = "task-3273125" [ 750.385962] env[69328]: _type = "Task" [ 750.385962] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.398983] env[69328]: DEBUG oslo_vmware.api [None req-fd9effd8-506c-4bc3-8d83-942135c74d2b tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': task-3273125, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.459596] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273123, 'name': ReconfigVM_Task, 'duration_secs': 0.370796} completed successfully. 
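The destroy path traced above for instance 732342ea follows a fixed order: power off the VM, unregister it from vCenter, then delete its directory on the datastore, with each step waited on as its own vCenter task. A rough sketch of that ordering is below; the session methods are hypothetical stand-ins that are assumed to block until the corresponding task completes, not Nova's or oslo.vmware's actual API.

def destroy_instance(session, vm_ref, datastore_path):
    """Illustrative destroy ordering: power off -> unregister -> delete files."""
    session.power_off(vm_ref)                       # PowerOffVM_Task
    session.unregister(vm_ref)                      # UnregisterVM (VM removed, files kept)
    session.delete_datastore_path(datastore_path)   # DeleteDatastoreFile_Task on the instance dir

# Usage would look like:
#   destroy_instance(session, vm_ref, "[datastore2] 732342ea-2f73-40ea-a826-883ddc7a385a")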
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.459596] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Reconfigured VM instance instance-00000027 to attach disk [datastore2] 5292b759-9d1f-486a-b4d6-90519b3ae986/5292b759-9d1f-486a-b4d6-90519b3ae986.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 750.459596] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-17d314af-1ba9-4588-bcb4-5033fa713649 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.466222] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Waiting for the task: (returnval){ [ 750.466222] env[69328]: value = "task-3273126" [ 750.466222] env[69328]: _type = "Task" [ 750.466222] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.475648] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 750.476910] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273126, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.477747] env[69328]: DEBUG nova.compute.utils [None req-da0f032d-986d-4ac6-b9b1-dd7f3fc00d76 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 750.523809] env[69328]: DEBUG nova.network.neutron [req-4b348bc2-684c-479d-aa1b-419e587f5f89 req-83766846-6bbd-4bd7-acdf-c7cb75dc5c5f service nova] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Updated VIF entry in instance network info cache for port 1b80d882-8edf-4d06-a91a-6568cce981e8. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 750.524186] env[69328]: DEBUG nova.network.neutron [req-4b348bc2-684c-479d-aa1b-419e587f5f89 req-83766846-6bbd-4bd7-acdf-c7cb75dc5c5f service nova] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Updating instance_info_cache with network_info: [{"id": "1b80d882-8edf-4d06-a91a-6568cce981e8", "address": "fa:16:3e:32:ef:98", "network": {"id": "2060ab72-61b3-4aea-bcdd-0b8a76a11fc7", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1995944610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65edf1e9f4344038878d05021bbdef78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b80d882-8e", "ovs_interfaceid": "1b80d882-8edf-4d06-a91a-6568cce981e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.688266] env[69328]: DEBUG nova.network.neutron [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 750.880874] env[69328]: DEBUG nova.compute.manager [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 750.880874] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 750.880874] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5c99fc3-c21d-41ca-acb2-e5c0d61c1734 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.888609] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 750.892535] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d2347b51-dcc7-4c47-9dfc-ceceaba49f57 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.900273] env[69328]: DEBUG oslo_vmware.api [None req-fd9effd8-506c-4bc3-8d83-942135c74d2b tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Task: {'id': task-3273125, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.181289} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.901864] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd9effd8-506c-4bc3-8d83-942135c74d2b tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 750.902260] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-fd9effd8-506c-4bc3-8d83-942135c74d2b tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 750.902550] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-fd9effd8-506c-4bc3-8d83-942135c74d2b tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 750.902852] env[69328]: INFO nova.compute.manager [None req-fd9effd8-506c-4bc3-8d83-942135c74d2b tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Took 1.10 seconds to destroy the instance on the hypervisor. [ 750.903303] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fd9effd8-506c-4bc3-8d83-942135c74d2b tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 750.903735] env[69328]: DEBUG oslo_vmware.api [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Waiting for the task: (returnval){ [ 750.903735] env[69328]: value = "task-3273127" [ 750.903735] env[69328]: _type = "Task" [ 750.903735] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.904745] env[69328]: DEBUG nova.compute.manager [-] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 750.904745] env[69328]: DEBUG nova.network.neutron [-] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 750.920635] env[69328]: DEBUG oslo_vmware.api [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': task-3273127, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.976678] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273126, 'name': Rename_Task, 'duration_secs': 0.211306} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.977295] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 750.977701] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c7a24d81-e323-4a0a-b447-9f75c540757e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.984056] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 25fb207b-9388-4198-bb48-ab7cebd43375 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
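The "_deallocate_network_with_retries" wrapper seen above runs the Neutron deallocation under a looping call so that transient failures are retried rather than leaking ports. A generic retry loop with the same shape is sketched below; it deliberately uses only the standard library and is not the oslo.service or nova.compute.manager implementation.

import time

def call_with_retries(func, attempts=3, interval=2.0, retry_on=(Exception,)):
    """Call func(), retrying up to `attempts` times on the given exceptions (sketch)."""
    for attempt in range(1, attempts + 1):
        try:
            return func()
        except retry_on as exc:
            if attempt == attempts:
                raise
            print("attempt %d failed (%s); retrying in %.1fs" % (attempt, exc, interval))
            time.sleep(interval)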
{{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 750.987196] env[69328]: DEBUG oslo_concurrency.lockutils [None req-da0f032d-986d-4ac6-b9b1-dd7f3fc00d76 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "c465c53f-d96b-461b-b8ff-b19929b4f789" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.010s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 750.994075] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Waiting for the task: (returnval){ [ 750.994075] env[69328]: value = "task-3273128" [ 750.994075] env[69328]: _type = "Task" [ 750.994075] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.001111] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273128, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.031906] env[69328]: DEBUG oslo_concurrency.lockutils [req-4b348bc2-684c-479d-aa1b-419e587f5f89 req-83766846-6bbd-4bd7-acdf-c7cb75dc5c5f service nova] Releasing lock "refresh_cache-3923403b-2e8f-4033-89ee-9a907aff1d49" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 751.031906] env[69328]: DEBUG oslo_concurrency.lockutils [req-fa0dc1d2-c2fc-421b-abe6-07441b01f6ab req-45f0f569-7e72-4c4a-9a56-79647e1bd40b service nova] Acquired lock "refresh_cache-3923403b-2e8f-4033-89ee-9a907aff1d49" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 751.031906] env[69328]: DEBUG nova.network.neutron [req-fa0dc1d2-c2fc-421b-abe6-07441b01f6ab req-45f0f569-7e72-4c4a-9a56-79647e1bd40b service nova] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Refreshing network info cache for port 1b80d882-8edf-4d06-a91a-6568cce981e8 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 751.079108] env[69328]: DEBUG nova.network.neutron [-] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 751.164951] env[69328]: DEBUG nova.network.neutron [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Updating instance_info_cache with network_info: [{"id": "b4eaba7b-0335-4344-94ab-94e9d7a355a2", "address": "fa:16:3e:bd:e5:26", "network": {"id": "b18f73ad-2ff8-4679-853a-f27ced0e60fd", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-906269769-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a019831c6ff240f3bfbf2c2bff104788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dcf5c3f7-4e33-4f21-b323-3673930b789c", "external-id": "nsx-vlan-transportzone-983", "segmentation_id": 983, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4eaba7b-03", "ovs_interfaceid": "b4eaba7b-0335-4344-94ab-94e9d7a355a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.417895] env[69328]: DEBUG oslo_vmware.api [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': task-3273127, 'name': PowerOffVM_Task, 'duration_secs': 0.366669} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.418271] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 751.418479] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 751.418766] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f4ec0748-2e0f-4ed2-adae-d6342e4439dd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.488283] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 751.509882] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273128, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.583311] env[69328]: DEBUG nova.network.neutron [-] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.668320] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Releasing lock "refresh_cache-afa25f89-ccda-4b77-aaa1-a3b62b53d870" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 751.668686] env[69328]: DEBUG nova.compute.manager [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Instance network_info: |[{"id": "b4eaba7b-0335-4344-94ab-94e9d7a355a2", "address": "fa:16:3e:bd:e5:26", "network": {"id": "b18f73ad-2ff8-4679-853a-f27ced0e60fd", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-906269769-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a019831c6ff240f3bfbf2c2bff104788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dcf5c3f7-4e33-4f21-b323-3673930b789c", "external-id": "nsx-vlan-transportzone-983", "segmentation_id": 983, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4eaba7b-03", "ovs_interfaceid": "b4eaba7b-0335-4344-94ab-94e9d7a355a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 751.672336] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bd:e5:26', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dcf5c3f7-4e33-4f21-b323-3673930b789c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b4eaba7b-0335-4344-94ab-94e9d7a355a2', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 751.680338] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 
tempest-ListServersNegativeTestJSON-612887248-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 751.680757] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 751.681384] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6a295819-3172-4fb1-8216-18c9efaca33a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.706064] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 751.706064] env[69328]: value = "task-3273130" [ 751.706064] env[69328]: _type = "Task" [ 751.706064] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.716056] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273130, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.754698] env[69328]: DEBUG nova.compute.manager [req-bd2a980e-60c5-4e99-8a26-f5d8f76b7f89 req-b84ccc8a-287d-4f41-8288-cdc4c74ca776 service nova] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Received event network-vif-plugged-b4eaba7b-0335-4344-94ab-94e9d7a355a2 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 751.754768] env[69328]: DEBUG oslo_concurrency.lockutils [req-bd2a980e-60c5-4e99-8a26-f5d8f76b7f89 req-b84ccc8a-287d-4f41-8288-cdc4c74ca776 service nova] Acquiring lock "afa25f89-ccda-4b77-aaa1-a3b62b53d870-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 751.754971] env[69328]: DEBUG oslo_concurrency.lockutils [req-bd2a980e-60c5-4e99-8a26-f5d8f76b7f89 req-b84ccc8a-287d-4f41-8288-cdc4c74ca776 service nova] Lock "afa25f89-ccda-4b77-aaa1-a3b62b53d870-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 751.755158] env[69328]: DEBUG oslo_concurrency.lockutils [req-bd2a980e-60c5-4e99-8a26-f5d8f76b7f89 req-b84ccc8a-287d-4f41-8288-cdc4c74ca776 service nova] Lock "afa25f89-ccda-4b77-aaa1-a3b62b53d870-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 751.755323] env[69328]: DEBUG nova.compute.manager [req-bd2a980e-60c5-4e99-8a26-f5d8f76b7f89 req-b84ccc8a-287d-4f41-8288-cdc4c74ca776 service nova] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] No waiting events found dispatching network-vif-plugged-b4eaba7b-0335-4344-94ab-94e9d7a355a2 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 751.755482] env[69328]: WARNING nova.compute.manager [req-bd2a980e-60c5-4e99-8a26-f5d8f76b7f89 req-b84ccc8a-287d-4f41-8288-cdc4c74ca776 service nova] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Received unexpected event network-vif-plugged-b4eaba7b-0335-4344-94ab-94e9d7a355a2 for instance with vm_state building and task_state spawning. 
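The lockutils DEBUG lines above record, for each named lock (per-instance locks, "-events" locks, "refresh_cache-..." locks), how long the caller waited to acquire it and how long it was held ("waited 0.000s", "held 0.000s"). A minimal sketch of that instrumentation around a plain threading.Lock follows; the real implementation is oslo_concurrency.lockutils, and the names here are illustrative.

import contextlib
import threading
import time

_locks = {}
_registry_guard = threading.Lock()

@contextlib.contextmanager
def timed_lock(name, owner):
    """Acquire a named lock, reporting wait and hold times like the log lines above."""
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    start = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - start
    print('Lock "%s" acquired by "%s" :: waited %.3fs' % (name, owner, waited))
    held_from = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - held_from
        print('Lock "%s" "released" by "%s" :: held %.3fs' % (name, owner, held))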
[ 751.755636] env[69328]: DEBUG nova.compute.manager [req-bd2a980e-60c5-4e99-8a26-f5d8f76b7f89 req-b84ccc8a-287d-4f41-8288-cdc4c74ca776 service nova] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Received event network-changed-b4eaba7b-0335-4344-94ab-94e9d7a355a2 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 751.755785] env[69328]: DEBUG nova.compute.manager [req-bd2a980e-60c5-4e99-8a26-f5d8f76b7f89 req-b84ccc8a-287d-4f41-8288-cdc4c74ca776 service nova] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Refreshing instance network info cache due to event network-changed-b4eaba7b-0335-4344-94ab-94e9d7a355a2. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 751.756597] env[69328]: DEBUG oslo_concurrency.lockutils [req-bd2a980e-60c5-4e99-8a26-f5d8f76b7f89 req-b84ccc8a-287d-4f41-8288-cdc4c74ca776 service nova] Acquiring lock "refresh_cache-afa25f89-ccda-4b77-aaa1-a3b62b53d870" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.756597] env[69328]: DEBUG oslo_concurrency.lockutils [req-bd2a980e-60c5-4e99-8a26-f5d8f76b7f89 req-b84ccc8a-287d-4f41-8288-cdc4c74ca776 service nova] Acquired lock "refresh_cache-afa25f89-ccda-4b77-aaa1-a3b62b53d870" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 751.756597] env[69328]: DEBUG nova.network.neutron [req-bd2a980e-60c5-4e99-8a26-f5d8f76b7f89 req-b84ccc8a-287d-4f41-8288-cdc4c74ca776 service nova] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Refreshing network info cache for port b4eaba7b-0335-4344-94ab-94e9d7a355a2 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 751.836036] env[69328]: DEBUG nova.network.neutron [req-fa0dc1d2-c2fc-421b-abe6-07441b01f6ab req-45f0f569-7e72-4c4a-9a56-79647e1bd40b service nova] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Updated VIF entry in instance network info cache for port 1b80d882-8edf-4d06-a91a-6568cce981e8. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 751.836431] env[69328]: DEBUG nova.network.neutron [req-fa0dc1d2-c2fc-421b-abe6-07441b01f6ab req-45f0f569-7e72-4c4a-9a56-79647e1bd40b service nova] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Updating instance_info_cache with network_info: [{"id": "1b80d882-8edf-4d06-a91a-6568cce981e8", "address": "fa:16:3e:32:ef:98", "network": {"id": "2060ab72-61b3-4aea-bcdd-0b8a76a11fc7", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1995944610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65edf1e9f4344038878d05021bbdef78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b80d882-8e", "ovs_interfaceid": "1b80d882-8edf-4d06-a91a-6568cce981e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.857568] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 751.857789] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 751.857970] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Deleting the datastore file [datastore1] 3923403b-2e8f-4033-89ee-9a907aff1d49 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 751.858492] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c5ccf323-f0ab-4b29-86d1-81f4c9cc4dc1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.866111] env[69328]: DEBUG oslo_vmware.api [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Waiting for the task: (returnval){ [ 751.866111] env[69328]: value = "task-3273131" [ 751.866111] env[69328]: _type = "Task" [ 751.866111] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.875480] env[69328]: DEBUG oslo_vmware.api [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': task-3273131, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.991538] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance d10bee67-6294-4537-9ce7-4eedb8361ddc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 752.004287] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273128, 'name': PowerOnVM_Task, 'duration_secs': 0.572293} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.005167] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 752.005375] env[69328]: INFO nova.compute.manager [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Took 8.52 seconds to spawn the instance on the hypervisor. [ 752.005569] env[69328]: DEBUG nova.compute.manager [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 752.006399] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fee6e63d-4507-4f24-905d-fee2a2d646e9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.066030] env[69328]: DEBUG nova.compute.manager [req-0427e6d4-4029-4256-bc00-add4b6526818 req-80d30194-c38b-44e9-a6c1-a84a49c66f94 service nova] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Received event network-changed-efc73e76-7767-42a3-b5a4-3891364b487f {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 752.066149] env[69328]: DEBUG nova.compute.manager [req-0427e6d4-4029-4256-bc00-add4b6526818 req-80d30194-c38b-44e9-a6c1-a84a49c66f94 service nova] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Refreshing instance network info cache due to event network-changed-efc73e76-7767-42a3-b5a4-3891364b487f. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 752.066354] env[69328]: DEBUG oslo_concurrency.lockutils [req-0427e6d4-4029-4256-bc00-add4b6526818 req-80d30194-c38b-44e9-a6c1-a84a49c66f94 service nova] Acquiring lock "refresh_cache-e92953f4-b634-4ef9-a5ad-63a886cfa007" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.066485] env[69328]: DEBUG oslo_concurrency.lockutils [req-0427e6d4-4029-4256-bc00-add4b6526818 req-80d30194-c38b-44e9-a6c1-a84a49c66f94 service nova] Acquired lock "refresh_cache-e92953f4-b634-4ef9-a5ad-63a886cfa007" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 752.066651] env[69328]: DEBUG nova.network.neutron [req-0427e6d4-4029-4256-bc00-add4b6526818 req-80d30194-c38b-44e9-a6c1-a84a49c66f94 service nova] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Refreshing network info cache for port efc73e76-7767-42a3-b5a4-3891364b487f {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 752.080060] env[69328]: DEBUG oslo_concurrency.lockutils [None req-da0f032d-986d-4ac6-b9b1-dd7f3fc00d76 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "c465c53f-d96b-461b-b8ff-b19929b4f789" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 752.080060] env[69328]: DEBUG oslo_concurrency.lockutils [None req-da0f032d-986d-4ac6-b9b1-dd7f3fc00d76 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "c465c53f-d96b-461b-b8ff-b19929b4f789" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 752.080060] env[69328]: INFO nova.compute.manager [None req-da0f032d-986d-4ac6-b9b1-dd7f3fc00d76 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Attaching volume 8d078387-725a-4917-a1c9-b494ee2ec127 to /dev/sdb [ 752.086969] env[69328]: INFO nova.compute.manager [-] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Took 1.18 seconds to deallocate network for instance. [ 752.115220] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cafdc68c-c584-4ea5-98f4-c0399400881d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.122855] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11138fa9-9b3b-4a20-99a7-c05740b1cc21 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.137432] env[69328]: DEBUG nova.virt.block_device [None req-da0f032d-986d-4ac6-b9b1-dd7f3fc00d76 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Updating existing volume attachment record: da1b2764-345b-4c45-8775-247f36a2a979 {{(pid=69328) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 752.218724] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273130, 'name': CreateVM_Task} progress is 25%. 
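"Using /dev/sd instead of None" followed by "Attaching volume ... to /dev/sdb" above shows Nova choosing the next free device name on instance c465c53f because the attach request did not specify one. A toy version of that selection is sketched below; Nova's real nova.compute.utils.get_next_device_name also accounts for existing block device mappings and multi-letter suffixes, so treat this only as an illustration.

import string

def next_device_name(existing, prefix="/dev/sd"):
    """Return the first unused /dev/sdX name (single-letter suffixes only)."""
    used = {name for name in existing if name and name.startswith(prefix)}
    for letter in string.ascii_lowercase:
        candidate = prefix + letter
        if candidate not in used:
            return candidate
    raise ValueError("no free device names under %s*" % prefix)

# e.g. next_device_name(["/dev/sda"]) -> "/dev/sdb", matching the attach above.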
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.339862] env[69328]: DEBUG oslo_concurrency.lockutils [req-fa0dc1d2-c2fc-421b-abe6-07441b01f6ab req-45f0f569-7e72-4c4a-9a56-79647e1bd40b service nova] Releasing lock "refresh_cache-3923403b-2e8f-4033-89ee-9a907aff1d49" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 752.376551] env[69328]: DEBUG oslo_vmware.api [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': task-3273131, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.480638} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.376815] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 752.377010] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 752.377193] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 752.377369] env[69328]: INFO nova.compute.manager [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Took 1.50 seconds to destroy the instance on the hypervisor. [ 752.377610] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 752.380086] env[69328]: DEBUG nova.compute.manager [-] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 752.380224] env[69328]: DEBUG nova.network.neutron [-] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 752.494624] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 4d320c76-45bb-451c-8fbb-3dd2d64f56d5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 752.683484] env[69328]: DEBUG nova.network.neutron [req-bd2a980e-60c5-4e99-8a26-f5d8f76b7f89 req-b84ccc8a-287d-4f41-8288-cdc4c74ca776 service nova] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Updated VIF entry in instance network info cache for port b4eaba7b-0335-4344-94ab-94e9d7a355a2. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 752.683869] env[69328]: DEBUG nova.network.neutron [req-bd2a980e-60c5-4e99-8a26-f5d8f76b7f89 req-b84ccc8a-287d-4f41-8288-cdc4c74ca776 service nova] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Updating instance_info_cache with network_info: [{"id": "b4eaba7b-0335-4344-94ab-94e9d7a355a2", "address": "fa:16:3e:bd:e5:26", "network": {"id": "b18f73ad-2ff8-4679-853a-f27ced0e60fd", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-906269769-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a019831c6ff240f3bfbf2c2bff104788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dcf5c3f7-4e33-4f21-b323-3673930b789c", "external-id": "nsx-vlan-transportzone-983", "segmentation_id": 983, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4eaba7b-03", "ovs_interfaceid": "b4eaba7b-0335-4344-94ab-94e9d7a355a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.717414] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273130, 'name': CreateVM_Task} progress is 25%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.820769] env[69328]: DEBUG nova.network.neutron [req-0427e6d4-4029-4256-bc00-add4b6526818 req-80d30194-c38b-44e9-a6c1-a84a49c66f94 service nova] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Updated VIF entry in instance network info cache for port efc73e76-7767-42a3-b5a4-3891364b487f. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 752.821172] env[69328]: DEBUG nova.network.neutron [req-0427e6d4-4029-4256-bc00-add4b6526818 req-80d30194-c38b-44e9-a6c1-a84a49c66f94 service nova] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Updating instance_info_cache with network_info: [{"id": "efc73e76-7767-42a3-b5a4-3891364b487f", "address": "fa:16:3e:0d:8d:73", "network": {"id": "2060ab72-61b3-4aea-bcdd-0b8a76a11fc7", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1995944610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.195", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65edf1e9f4344038878d05021bbdef78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefc73e76-77", "ovs_interfaceid": "efc73e76-7767-42a3-b5a4-3891364b487f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.999126] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 55f44102-2891-4b6c-b31e-e8255a24d180 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 753.188516] env[69328]: DEBUG oslo_concurrency.lockutils [req-bd2a980e-60c5-4e99-8a26-f5d8f76b7f89 req-b84ccc8a-287d-4f41-8288-cdc4c74ca776 service nova] Releasing lock "refresh_cache-afa25f89-ccda-4b77-aaa1-a3b62b53d870" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 753.217555] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273130, 'name': CreateVM_Task} progress is 25%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.323988] env[69328]: DEBUG oslo_concurrency.lockutils [req-0427e6d4-4029-4256-bc00-add4b6526818 req-80d30194-c38b-44e9-a6c1-a84a49c66f94 service nova] Releasing lock "refresh_cache-e92953f4-b634-4ef9-a5ad-63a886cfa007" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 753.502797] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 07b1f872-02bc-471f-97d6-3a781075bee5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
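Each cached network_info entry above (port id, MAC address, bridge, and the NSX logical-switch id under "details") is what vmops later condenses into the "Instance VIF info" list used to build the VM, as seen earlier for afa25f89: one dict per port carrying the MAC, an opaque network reference, and the vif model. The sketch below mirrors the field names visible in the log; it is an illustration of the mapping, not the nova.virt.vmwareapi implementation.

def vif_info_from_network_info(network_info, vif_model="vmxnet3"):
    """Condense neutron network_info entries into VMware-style VIF info dicts (sketch)."""
    vifs = []
    for vif in network_info:
        details = vif.get("details", {})
        vifs.append({
            "network_name": vif["network"]["bridge"],            # e.g. br-int
            "mac_address": vif["address"],
            "network_ref": {
                "type": "OpaqueNetwork",
                "network-id": details.get("nsx-logical-switch-id"),
                "network-type": "nsx.LogicalSwitch",
                "use-external-id": True,
            },
            "iface_id": vif["id"],
            "vif_model": vif_model,
        })
    return vifs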
{{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 753.718432] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273130, 'name': CreateVM_Task} progress is 25%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.006026] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance ef7effe4-b37f-4fab-ad24-9d8f72a47ee2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 754.220972] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273130, 'name': CreateVM_Task} progress is 25%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.508834] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 754.509214] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Total usable vcpus: 48, total allocated vcpus: 12 {{(pid=69328) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 754.509367] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2880MB phys_disk=200GB used_disk=12GB total_vcpus=48 used_vcpus=12 pci_stats=[] {{(pid=69328) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 754.721741] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273130, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.928401] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed95de32-2521-481f-91aa-13edef11195b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.936447] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9033d30-eb90-4911-af1b-3ce930a6890d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.967319] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93909b60-fbc6-4c39-a37d-bd80ec1eb2fa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.975600] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27cf27bb-b7d8-4000-b9af-c72e3e6ffd38 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.989616] env[69328]: DEBUG nova.compute.provider_tree [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 755.221216] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273130, 'name': CreateVM_Task} progress is 25%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.492401] env[69328]: DEBUG nova.scheduler.client.report [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 755.532257] env[69328]: INFO nova.compute.manager [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Took 47.48 seconds to build instance. [ 755.595967] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fd9effd8-506c-4bc3-8d83-942135c74d2b tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 755.733329] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273130, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.808151] env[69328]: DEBUG nova.compute.manager [req-227be686-df13-4d08-b90f-561fd7bb9aa3 req-3a32c99c-fda5-4e88-8f1d-ebe4a387c93d service nova] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Received event network-vif-deleted-1b80d882-8edf-4d06-a91a-6568cce981e8 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 755.808359] env[69328]: INFO nova.compute.manager [req-227be686-df13-4d08-b90f-561fd7bb9aa3 req-3a32c99c-fda5-4e88-8f1d-ebe4a387c93d service nova] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Neutron deleted interface 1b80d882-8edf-4d06-a91a-6568cce981e8; detaching it from the instance and deleting it from the info cache [ 755.808532] env[69328]: DEBUG nova.network.neutron [req-227be686-df13-4d08-b90f-561fd7bb9aa3 req-3a32c99c-fda5-4e88-8f1d-ebe4a387c93d service nova] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.997757] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69328) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 755.998761] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 9.616s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 755.998761] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 42.358s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 755.999966] env[69328]: INFO nova.compute.claims [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 756.036887] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Lock "5292b759-9d1f-486a-b4d6-90519b3ae986" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 81.073s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 756.113101] env[69328]: DEBUG nova.network.neutron [-] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.224604] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273130, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.313857] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-842af522-8f60-4ed7-9966-85d461ae2d6d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.324320] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfc64d9f-8a51-4db8-855a-e8c8b1358a53 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.359109] env[69328]: DEBUG nova.compute.manager [req-227be686-df13-4d08-b90f-561fd7bb9aa3 req-3a32c99c-fda5-4e88-8f1d-ebe4a387c93d service nova] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Detach interface failed, port_id=1b80d882-8edf-4d06-a91a-6568cce981e8, reason: Instance 3923403b-2e8f-4033-89ee-9a907aff1d49 could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 756.539502] env[69328]: DEBUG nova.compute.manager [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 756.616801] env[69328]: INFO nova.compute.manager [-] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Took 4.24 seconds to deallocate network for instance. [ 756.724746] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273130, 'name': CreateVM_Task, 'duration_secs': 4.973761} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.724909] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 756.725764] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.725928] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 756.726289] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 756.726565] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5a56de2-536f-46e9-ab17-ba1a0144c18e {{(pid=69328) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.732380] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Waiting for the task: (returnval){ [ 756.732380] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]526832e9-16bb-c158-6c50-62382e43fe66" [ 756.732380] env[69328]: _type = "Task" [ 756.732380] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.741117] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]526832e9-16bb-c158-6c50-62382e43fe66, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.061702] env[69328]: DEBUG oslo_concurrency.lockutils [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 757.124353] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 757.244308] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]526832e9-16bb-c158-6c50-62382e43fe66, 'name': SearchDatastore_Task, 'duration_secs': 0.045312} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.247099] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 757.247358] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 757.247598] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.247743] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 757.247917] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 757.249269] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-78d22e55-b1de-47a9-9ee0-62b8286af738 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.266023] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 757.266226] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 757.266998] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a123a450-20d1-4b8c-b458-aed4303dde1e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.278902] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Waiting for the task: (returnval){ [ 757.278902] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52639f40-5b1f-7a11-ed37-ce83b6c5c8df" [ 757.278902] env[69328]: _type = "Task" [ 757.278902] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.287606] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52639f40-5b1f-7a11-ed37-ce83b6c5c8df, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.544512] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1227a8b-8828-4573-bee7-0c888e0695a5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.553011] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-978e3f20-204c-463d-9ec6-7c2b9f1ea333 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.583935] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-806f4d63-20b0-4c55-953f-75448c95eb8a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.592844] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b471a6e-73ce-4550-80ca-97cb2dda0c81 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.606961] env[69328]: DEBUG nova.compute.provider_tree [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 757.790397] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52639f40-5b1f-7a11-ed37-ce83b6c5c8df, 'name': SearchDatastore_Task, 'duration_secs': 0.040839} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.791264] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70e7bca7-677a-4668-8b88-1c88d0045c40 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.797197] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Waiting for the task: (returnval){ [ 757.797197] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]520b5b79-1da6-9b00-ef64-b57f7e4c31b0" [ 757.797197] env[69328]: _type = "Task" [ 757.797197] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.805496] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]520b5b79-1da6-9b00-ef64-b57f7e4c31b0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.841531] env[69328]: DEBUG nova.compute.manager [req-6c48f5e3-9264-49d1-bf71-6c1becf7a056 req-c0b2336d-8104-42d4-bc2a-9b57910dd5be service nova] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Received event network-changed-efc73e76-7767-42a3-b5a4-3891364b487f {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 757.841531] env[69328]: DEBUG nova.compute.manager [req-6c48f5e3-9264-49d1-bf71-6c1becf7a056 req-c0b2336d-8104-42d4-bc2a-9b57910dd5be service nova] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Refreshing instance network info cache due to event network-changed-efc73e76-7767-42a3-b5a4-3891364b487f. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 757.841531] env[69328]: DEBUG oslo_concurrency.lockutils [req-6c48f5e3-9264-49d1-bf71-6c1becf7a056 req-c0b2336d-8104-42d4-bc2a-9b57910dd5be service nova] Acquiring lock "refresh_cache-e92953f4-b634-4ef9-a5ad-63a886cfa007" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.841706] env[69328]: DEBUG oslo_concurrency.lockutils [req-6c48f5e3-9264-49d1-bf71-6c1becf7a056 req-c0b2336d-8104-42d4-bc2a-9b57910dd5be service nova] Acquired lock "refresh_cache-e92953f4-b634-4ef9-a5ad-63a886cfa007" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 757.841742] env[69328]: DEBUG nova.network.neutron [req-6c48f5e3-9264-49d1-bf71-6c1becf7a056 req-c0b2336d-8104-42d4-bc2a-9b57910dd5be service nova] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Refreshing network info cache for port efc73e76-7767-42a3-b5a4-3891364b487f {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 758.109916] env[69328]: DEBUG nova.scheduler.client.report [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 758.310918] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]520b5b79-1da6-9b00-ef64-b57f7e4c31b0, 'name': SearchDatastore_Task, 'duration_secs': 0.010605} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.311290] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 758.311531] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] afa25f89-ccda-4b77-aaa1-a3b62b53d870/afa25f89-ccda-4b77-aaa1-a3b62b53d870.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 758.311795] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c8802f61-0d8c-4481-8bf3-d187b22a9f74 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.320077] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Waiting for the task: (returnval){ [ 758.320077] env[69328]: value = "task-3273136" [ 758.320077] env[69328]: _type = "Task" [ 758.320077] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.329273] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273136, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.615431] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.617s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 758.616040] env[69328]: DEBUG nova.compute.manager [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 758.623081] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6f170196-6d2e-4f2f-8577-f6832c4cd8bf tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 44.982s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 758.623308] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6f170196-6d2e-4f2f-8577-f6832c4cd8bf tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 758.625311] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 41.232s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 758.625542] env[69328]: DEBUG nova.objects.instance [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69328) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 758.664376] env[69328]: INFO nova.scheduler.client.report [None req-6f170196-6d2e-4f2f-8577-f6832c4cd8bf tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Deleted allocations for instance 676173ee-8001-48c6-bd28-09130f6dd99a [ 758.692171] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-da0f032d-986d-4ac6-b9b1-dd7f3fc00d76 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Volume attach. 
Driver type: vmdk {{(pid=69328) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 758.692443] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-da0f032d-986d-4ac6-b9b1-dd7f3fc00d76 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653776', 'volume_id': '8d078387-725a-4917-a1c9-b494ee2ec127', 'name': 'volume-8d078387-725a-4917-a1c9-b494ee2ec127', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c465c53f-d96b-461b-b8ff-b19929b4f789', 'attached_at': '', 'detached_at': '', 'volume_id': '8d078387-725a-4917-a1c9-b494ee2ec127', 'serial': '8d078387-725a-4917-a1c9-b494ee2ec127'} {{(pid=69328) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 758.693534] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e886872b-aa77-44d7-a640-d74ea65e2ca1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.699849] env[69328]: DEBUG nova.network.neutron [req-6c48f5e3-9264-49d1-bf71-6c1becf7a056 req-c0b2336d-8104-42d4-bc2a-9b57910dd5be service nova] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Updated VIF entry in instance network info cache for port efc73e76-7767-42a3-b5a4-3891364b487f. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 758.700573] env[69328]: DEBUG nova.network.neutron [req-6c48f5e3-9264-49d1-bf71-6c1becf7a056 req-c0b2336d-8104-42d4-bc2a-9b57910dd5be service nova] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Updating instance_info_cache with network_info: [{"id": "efc73e76-7767-42a3-b5a4-3891364b487f", "address": "fa:16:3e:0d:8d:73", "network": {"id": "2060ab72-61b3-4aea-bcdd-0b8a76a11fc7", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1995944610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65edf1e9f4344038878d05021bbdef78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "41278529-8bd2-44a1-97c8-03967faa3ff7", "external-id": "nsx-vlan-transportzone-749", "segmentation_id": 749, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefc73e76-77", "ovs_interfaceid": "efc73e76-7767-42a3-b5a4-3891364b487f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.723673] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5a4f00c-8cdc-48f8-a45c-a85b529cf6bc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.754832] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-da0f032d-986d-4ac6-b9b1-dd7f3fc00d76 
tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Reconfiguring VM instance instance-00000026 to attach disk [datastore2] volume-8d078387-725a-4917-a1c9-b494ee2ec127/volume-8d078387-725a-4917-a1c9-b494ee2ec127.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 758.755684] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7ab60ac0-0efd-4b8c-a280-60b4dbd407a8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.777658] env[69328]: DEBUG oslo_vmware.api [None req-da0f032d-986d-4ac6-b9b1-dd7f3fc00d76 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 758.777658] env[69328]: value = "task-3273137" [ 758.777658] env[69328]: _type = "Task" [ 758.777658] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.789566] env[69328]: DEBUG oslo_vmware.api [None req-da0f032d-986d-4ac6-b9b1-dd7f3fc00d76 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273137, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.830535] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273136, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.132488] env[69328]: DEBUG nova.compute.utils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 759.136538] env[69328]: DEBUG nova.compute.manager [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 759.136728] env[69328]: DEBUG nova.network.neutron [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 759.173222] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6f170196-6d2e-4f2f-8577-f6832c4cd8bf tempest-ServersAdminNegativeTestJSON-1126884074 tempest-ServersAdminNegativeTestJSON-1126884074-project-member] Lock "676173ee-8001-48c6-bd28-09130f6dd99a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 49.369s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 759.197253] env[69328]: DEBUG nova.policy [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b896180d007146d9b95cb7b9aa75f8e2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a019831c6ff240f3bfbf2c2bff104788', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 759.203793] env[69328]: DEBUG oslo_concurrency.lockutils [req-6c48f5e3-9264-49d1-bf71-6c1becf7a056 req-c0b2336d-8104-42d4-bc2a-9b57910dd5be service nova] Releasing lock "refresh_cache-e92953f4-b634-4ef9-a5ad-63a886cfa007" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 759.289434] env[69328]: DEBUG oslo_vmware.api [None req-da0f032d-986d-4ac6-b9b1-dd7f3fc00d76 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273137, 'name': ReconfigVM_Task, 'duration_secs': 0.394499} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.289718] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-da0f032d-986d-4ac6-b9b1-dd7f3fc00d76 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Reconfigured VM instance instance-00000026 to attach disk [datastore2] volume-8d078387-725a-4917-a1c9-b494ee2ec127/volume-8d078387-725a-4917-a1c9-b494ee2ec127.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 759.294759] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3ea0275f-7865-4fd8-b076-8e685b23d978 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.311448] env[69328]: DEBUG oslo_vmware.api [None req-da0f032d-986d-4ac6-b9b1-dd7f3fc00d76 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 759.311448] env[69328]: value = "task-3273138" [ 759.311448] env[69328]: _type = "Task" [ 759.311448] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.328464] env[69328]: DEBUG oslo_vmware.api [None req-da0f032d-986d-4ac6-b9b1-dd7f3fc00d76 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273138, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.336166] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273136, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.613701] env[69328]: DEBUG nova.network.neutron [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Successfully created port: 7f5ae309-7210-4bdd-8fd5-67e654004662 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 759.637735] env[69328]: DEBUG nova.compute.manager [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Start building block device mappings for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 759.644815] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6a38a4a1-7c2c-4e04-873c-89bb85f8109e tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 759.645622] env[69328]: DEBUG oslo_concurrency.lockutils [None req-008d5d67-cb82-4672-af1b-6d9c3e4bb77a tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.029s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 759.645761] env[69328]: DEBUG oslo_concurrency.lockutils [None req-008d5d67-cb82-4672-af1b-6d9c3e4bb77a tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 759.648218] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6f98f3ad-8c53-48d7-a859-d056e9f43bbd tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.074s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 759.648218] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6f98f3ad-8c53-48d7-a859-d056e9f43bbd tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 759.652411] env[69328]: DEBUG oslo_concurrency.lockutils [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.671s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 759.654067] env[69328]: INFO nova.compute.claims [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 759.698204] env[69328]: INFO nova.scheduler.client.report [None req-008d5d67-cb82-4672-af1b-6d9c3e4bb77a tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Deleted allocations for instance bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f [ 759.702148] env[69328]: INFO nova.scheduler.client.report [None req-6f98f3ad-8c53-48d7-a859-d056e9f43bbd tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Deleted allocations for instance 
b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25 [ 759.823899] env[69328]: DEBUG oslo_vmware.api [None req-da0f032d-986d-4ac6-b9b1-dd7f3fc00d76 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273138, 'name': ReconfigVM_Task, 'duration_secs': 0.158539} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.827724] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-da0f032d-986d-4ac6-b9b1-dd7f3fc00d76 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653776', 'volume_id': '8d078387-725a-4917-a1c9-b494ee2ec127', 'name': 'volume-8d078387-725a-4917-a1c9-b494ee2ec127', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c465c53f-d96b-461b-b8ff-b19929b4f789', 'attached_at': '', 'detached_at': '', 'volume_id': '8d078387-725a-4917-a1c9-b494ee2ec127', 'serial': '8d078387-725a-4917-a1c9-b494ee2ec127'} {{(pid=69328) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 759.841403] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273136, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.504991} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.841722] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] afa25f89-ccda-4b77-aaa1-a3b62b53d870/afa25f89-ccda-4b77-aaa1-a3b62b53d870.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 759.842060] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 759.842347] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ad0300c5-8665-46b7-9fef-147422ffe54a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.855353] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Waiting for the task: (returnval){ [ 759.855353] env[69328]: value = "task-3273139" [ 759.855353] env[69328]: _type = "Task" [ 759.855353] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.864697] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273139, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.213176] env[69328]: DEBUG oslo_concurrency.lockutils [None req-008d5d67-cb82-4672-af1b-6d9c3e4bb77a tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Lock "bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.584s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 760.214280] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6f98f3ad-8c53-48d7-a859-d056e9f43bbd tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.604s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 760.364694] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273139, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075171} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.365463] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 760.366121] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-732a8d0f-93aa-4c97-afa3-3a9bf6e6513c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.398723] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Reconfiguring VM instance instance-00000028 to attach disk [datastore2] afa25f89-ccda-4b77-aaa1-a3b62b53d870/afa25f89-ccda-4b77-aaa1-a3b62b53d870.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 760.399745] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f34e4b5e-0c5a-49e5-b416-43c46b6ba4c6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.421017] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Waiting for the task: (returnval){ [ 760.421017] 
env[69328]: value = "task-3273140" [ 760.421017] env[69328]: _type = "Task" [ 760.421017] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.434877] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273140, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.652563] env[69328]: DEBUG nova.compute.manager [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 760.682773] env[69328]: DEBUG nova.virt.hardware [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 760.683084] env[69328]: DEBUG nova.virt.hardware [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 760.683309] env[69328]: DEBUG nova.virt.hardware [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 760.683560] env[69328]: DEBUG nova.virt.hardware [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 760.683766] env[69328]: DEBUG nova.virt.hardware [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 760.683936] env[69328]: DEBUG nova.virt.hardware [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Chose sockets=0, cores=0, 
threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 760.684248] env[69328]: DEBUG nova.virt.hardware [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 760.684420] env[69328]: DEBUG nova.virt.hardware [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 760.684588] env[69328]: DEBUG nova.virt.hardware [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 760.684778] env[69328]: DEBUG nova.virt.hardware [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 760.684978] env[69328]: DEBUG nova.virt.hardware [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 760.685988] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc6d9bd2-504d-4aee-8cbc-ff839426f83b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.700988] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da584c81-3fa8-47e9-b912-7877e7fe8114 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.877704] env[69328]: DEBUG nova.objects.instance [None req-da0f032d-986d-4ac6-b9b1-dd7f3fc00d76 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lazy-loading 'flavor' on Instance uuid c465c53f-d96b-461b-b8ff-b19929b4f789 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 760.932781] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273140, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.222861] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-821bd43f-6d44-44e3-9dd6-952d741e9f20 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.232467] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1df5fdee-c438-45b2-b24c-833c07183645 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.266836] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-039dac2b-e46e-4fe0-9e90-fc3f43bcc287 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.276048] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-211f46ff-1598-45dc-b3d2-1ed11b16aec8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.291046] env[69328]: DEBUG nova.compute.provider_tree [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 761.380703] env[69328]: DEBUG oslo_concurrency.lockutils [None req-da0f032d-986d-4ac6-b9b1-dd7f3fc00d76 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "c465c53f-d96b-461b-b8ff-b19929b4f789" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 9.301s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 761.432674] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273140, 'name': ReconfigVM_Task, 'duration_secs': 0.520802} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.432990] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Reconfigured VM instance instance-00000028 to attach disk [datastore2] afa25f89-ccda-4b77-aaa1-a3b62b53d870/afa25f89-ccda-4b77-aaa1-a3b62b53d870.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 761.433709] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c94dc94d-51af-42a9-ada7-1774a70f228d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.442967] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Waiting for the task: (returnval){ [ 761.442967] env[69328]: value = "task-3273141" [ 761.442967] env[69328]: _type = "Task" [ 761.442967] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.452770] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273141, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.470026] env[69328]: DEBUG nova.compute.manager [req-994f16bb-29dd-4a10-b6b5-628a4f95c167 req-e521398d-8e4a-4990-8a58-d995584219f1 service nova] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Received event network-vif-plugged-7f5ae309-7210-4bdd-8fd5-67e654004662 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 761.470243] env[69328]: DEBUG oslo_concurrency.lockutils [req-994f16bb-29dd-4a10-b6b5-628a4f95c167 req-e521398d-8e4a-4990-8a58-d995584219f1 service nova] Acquiring lock "690096cf-a0bd-4db1-ad97-8d8a37ad7c84-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 761.470485] env[69328]: DEBUG oslo_concurrency.lockutils [req-994f16bb-29dd-4a10-b6b5-628a4f95c167 req-e521398d-8e4a-4990-8a58-d995584219f1 service nova] Lock "690096cf-a0bd-4db1-ad97-8d8a37ad7c84-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.470645] env[69328]: DEBUG oslo_concurrency.lockutils [req-994f16bb-29dd-4a10-b6b5-628a4f95c167 req-e521398d-8e4a-4990-8a58-d995584219f1 service nova] Lock "690096cf-a0bd-4db1-ad97-8d8a37ad7c84-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 761.470820] env[69328]: DEBUG nova.compute.manager [req-994f16bb-29dd-4a10-b6b5-628a4f95c167 req-e521398d-8e4a-4990-8a58-d995584219f1 service nova] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] No waiting events found dispatching 
network-vif-plugged-7f5ae309-7210-4bdd-8fd5-67e654004662 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 761.471581] env[69328]: WARNING nova.compute.manager [req-994f16bb-29dd-4a10-b6b5-628a4f95c167 req-e521398d-8e4a-4990-8a58-d995584219f1 service nova] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Received unexpected event network-vif-plugged-7f5ae309-7210-4bdd-8fd5-67e654004662 for instance with vm_state building and task_state spawning. [ 761.593685] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "c465c53f-d96b-461b-b8ff-b19929b4f789" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 761.594123] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "c465c53f-d96b-461b-b8ff-b19929b4f789" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.594212] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "c465c53f-d96b-461b-b8ff-b19929b4f789-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 761.594400] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "c465c53f-d96b-461b-b8ff-b19929b4f789-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.594569] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "c465c53f-d96b-461b-b8ff-b19929b4f789-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 761.597312] env[69328]: INFO nova.compute.manager [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Terminating instance [ 761.647103] env[69328]: DEBUG nova.network.neutron [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Successfully updated port: 7f5ae309-7210-4bdd-8fd5-67e654004662 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 761.795202] env[69328]: DEBUG nova.scheduler.client.report [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f 
tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 761.953025] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273141, 'name': Rename_Task, 'duration_secs': 0.275889} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.953315] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 761.953972] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9594cbc7-1b87-462c-aae5-17269b8de181 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.961899] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Waiting for the task: (returnval){ [ 761.961899] env[69328]: value = "task-3273142" [ 761.961899] env[69328]: _type = "Task" [ 761.961899] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.971647] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273142, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.101546] env[69328]: DEBUG nova.compute.manager [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 762.101806] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 762.102146] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-23097f10-f23f-4f15-946f-a758565fa409 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.109629] env[69328]: DEBUG oslo_vmware.api [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 762.109629] env[69328]: value = "task-3273143" [ 762.109629] env[69328]: _type = "Task" [ 762.109629] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.119020] env[69328]: DEBUG oslo_vmware.api [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273143, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.152195] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Acquiring lock "refresh_cache-690096cf-a0bd-4db1-ad97-8d8a37ad7c84" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.152195] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Acquired lock "refresh_cache-690096cf-a0bd-4db1-ad97-8d8a37ad7c84" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 762.152195] env[69328]: DEBUG nova.network.neutron [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 762.303029] env[69328]: DEBUG oslo_concurrency.lockutils [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.648s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 762.303029] env[69328]: DEBUG nova.compute.manager [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 762.304230] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7594ffc6-3bcd-41fa-867d-b0ee1a8dc8cd tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.235s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 762.304595] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7594ffc6-3bcd-41fa-867d-b0ee1a8dc8cd tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 762.306744] env[69328]: DEBUG oslo_concurrency.lockutils [None req-57dec915-e838-459b-bd12-33b5b857e7af tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.182s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 762.307070] env[69328]: DEBUG oslo_concurrency.lockutils [None req-57dec915-e838-459b-bd12-33b5b857e7af tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 762.310116] env[69328]: DEBUG oslo_concurrency.lockutils [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.410s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 762.310849] env[69328]: INFO nova.compute.claims [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 762.348566] env[69328]: INFO nova.scheduler.client.report [None req-57dec915-e838-459b-bd12-33b5b857e7af tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Deleted allocations for instance e1eec0ce-8df7-402a-b628-5dfdc11949e7 [ 762.357274] env[69328]: INFO nova.scheduler.client.report [None req-7594ffc6-3bcd-41fa-867d-b0ee1a8dc8cd tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Deleted allocations for instance 8e3a73c1-b622-47f4-99af-71b6dba7c09b [ 762.476645] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273142, 'name': PowerOnVM_Task, 'duration_secs': 0.472297} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.477456] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 762.478215] env[69328]: INFO nova.compute.manager [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Took 14.06 seconds to spawn the instance on the hypervisor. [ 762.479356] env[69328]: DEBUG nova.compute.manager [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 762.480323] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d24aaff-4561-46a1-b1ad-18f61fcd512a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.621478] env[69328]: DEBUG oslo_vmware.api [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273143, 'name': PowerOffVM_Task, 'duration_secs': 0.238734} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.621707] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 762.621912] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Volume detach. 
Driver type: vmdk {{(pid=69328) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 762.622309] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653776', 'volume_id': '8d078387-725a-4917-a1c9-b494ee2ec127', 'name': 'volume-8d078387-725a-4917-a1c9-b494ee2ec127', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c465c53f-d96b-461b-b8ff-b19929b4f789', 'attached_at': '', 'detached_at': '', 'volume_id': '8d078387-725a-4917-a1c9-b494ee2ec127', 'serial': '8d078387-725a-4917-a1c9-b494ee2ec127'} {{(pid=69328) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 762.623133] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65ceab19-9571-49ed-ba43-a9cd1b4e9a6f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.649594] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4113d077-f346-4a07-9e9c-4ad30dc2c6d9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.658811] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd528f21-04a1-4a8f-9fbf-bd0756448c67 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.683801] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59921496-0c0b-4f95-9aed-55dda83d2490 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.702811] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] The volume has not been displaced from its original location: [datastore2] volume-8d078387-725a-4917-a1c9-b494ee2ec127/volume-8d078387-725a-4917-a1c9-b494ee2ec127.vmdk. No consolidation needed. {{(pid=69328) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 762.711027] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Reconfiguring VM instance instance-00000026 to detach disk 2001 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 762.711143] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8eb189a3-a0b7-42f8-a2c0-376a93957713 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.725809] env[69328]: DEBUG nova.network.neutron [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 762.734636] env[69328]: DEBUG oslo_vmware.api [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 762.734636] env[69328]: value = "task-3273144" [ 762.734636] env[69328]: _type = "Task" [ 762.734636] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.744150] env[69328]: DEBUG oslo_vmware.api [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273144, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.819073] env[69328]: DEBUG nova.compute.utils [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 762.820595] env[69328]: DEBUG nova.compute.manager [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 762.820775] env[69328]: DEBUG nova.network.neutron [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 762.862332] env[69328]: DEBUG oslo_concurrency.lockutils [None req-57dec915-e838-459b-bd12-33b5b857e7af tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Lock "e1eec0ce-8df7-402a-b628-5dfdc11949e7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.474s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 762.871885] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7594ffc6-3bcd-41fa-867d-b0ee1a8dc8cd tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Lock "8e3a73c1-b622-47f4-99af-71b6dba7c09b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.284s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 762.884958] env[69328]: DEBUG nova.policy [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '44b2dc5070104ec48269d8210f0ba2d3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '690511a8725a4dd6ab796a15569293a9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': 
[]} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 763.007934] env[69328]: INFO nova.compute.manager [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Took 51.79 seconds to build instance. [ 763.087174] env[69328]: DEBUG nova.network.neutron [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Updating instance_info_cache with network_info: [{"id": "7f5ae309-7210-4bdd-8fd5-67e654004662", "address": "fa:16:3e:3e:bd:14", "network": {"id": "b18f73ad-2ff8-4679-853a-f27ced0e60fd", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-906269769-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a019831c6ff240f3bfbf2c2bff104788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dcf5c3f7-4e33-4f21-b323-3673930b789c", "external-id": "nsx-vlan-transportzone-983", "segmentation_id": 983, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f5ae309-72", "ovs_interfaceid": "7f5ae309-7210-4bdd-8fd5-67e654004662", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.248203] env[69328]: DEBUG oslo_vmware.api [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273144, 'name': ReconfigVM_Task, 'duration_secs': 0.209298} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.248780] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Reconfigured VM instance instance-00000026 to detach disk 2001 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 763.254447] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8bf6255d-b1ff-484d-bf2e-d874a99b22b0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.272283] env[69328]: DEBUG oslo_vmware.api [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 763.272283] env[69328]: value = "task-3273145" [ 763.272283] env[69328]: _type = "Task" [ 763.272283] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.282471] env[69328]: DEBUG oslo_vmware.api [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273145, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.325111] env[69328]: DEBUG nova.compute.manager [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 763.514948] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Lock "afa25f89-ccda-4b77-aaa1-a3b62b53d870" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 88.518s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 763.593194] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Releasing lock "refresh_cache-690096cf-a0bd-4db1-ad97-8d8a37ad7c84" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 763.593194] env[69328]: DEBUG nova.compute.manager [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Instance network_info: |[{"id": "7f5ae309-7210-4bdd-8fd5-67e654004662", "address": "fa:16:3e:3e:bd:14", "network": {"id": "b18f73ad-2ff8-4679-853a-f27ced0e60fd", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-906269769-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a019831c6ff240f3bfbf2c2bff104788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dcf5c3f7-4e33-4f21-b323-3673930b789c", "external-id": "nsx-vlan-transportzone-983", "segmentation_id": 983, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f5ae309-72", "ovs_interfaceid": "7f5ae309-7210-4bdd-8fd5-67e654004662", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 763.593464] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3e:bd:14', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dcf5c3f7-4e33-4f21-b323-3673930b789c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7f5ae309-7210-4bdd-8fd5-67e654004662', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 763.600906] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 763.603749] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 763.604431] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2bb40ae1-898c-426a-bc8e-03f6452ffd6a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.627107] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 763.627107] env[69328]: value = "task-3273146" [ 763.627107] env[69328]: _type = "Task" [ 763.627107] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.636862] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273146, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.764992] env[69328]: DEBUG nova.network.neutron [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Successfully created port: 90f7115d-cbd5-42dd-a07a-5eb45deb5276 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 763.784934] env[69328]: DEBUG oslo_vmware.api [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273145, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.917918] env[69328]: DEBUG nova.compute.manager [req-391929bb-21f4-4b5d-b20a-2e1af2526985 req-84af340a-be11-4d51-a357-75cfa213b378 service nova] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Received event network-changed-7f5ae309-7210-4bdd-8fd5-67e654004662 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 763.920845] env[69328]: DEBUG nova.compute.manager [req-391929bb-21f4-4b5d-b20a-2e1af2526985 req-84af340a-be11-4d51-a357-75cfa213b378 service nova] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Refreshing instance network info cache due to event network-changed-7f5ae309-7210-4bdd-8fd5-67e654004662. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 763.920845] env[69328]: DEBUG oslo_concurrency.lockutils [req-391929bb-21f4-4b5d-b20a-2e1af2526985 req-84af340a-be11-4d51-a357-75cfa213b378 service nova] Acquiring lock "refresh_cache-690096cf-a0bd-4db1-ad97-8d8a37ad7c84" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.920845] env[69328]: DEBUG oslo_concurrency.lockutils [req-391929bb-21f4-4b5d-b20a-2e1af2526985 req-84af340a-be11-4d51-a357-75cfa213b378 service nova] Acquired lock "refresh_cache-690096cf-a0bd-4db1-ad97-8d8a37ad7c84" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 763.920845] env[69328]: DEBUG nova.network.neutron [req-391929bb-21f4-4b5d-b20a-2e1af2526985 req-84af340a-be11-4d51-a357-75cfa213b378 service nova] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Refreshing network info cache for port 7f5ae309-7210-4bdd-8fd5-67e654004662 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 763.940650] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae4026d2-8381-4afa-a52e-d04e7e582612 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.949297] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66fe0b20-dbf3-4f7c-9198-541451dfdebe {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.984577] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a82a25e5-d94b-4100-ade7-e84e24b985a6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.996166] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb223add-55f6-40e4-a494-c7ef1f58728d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.011036] env[69328]: DEBUG nova.compute.provider_tree [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 764.022667] env[69328]: DEBUG nova.compute.manager [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 764.137847] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273146, 'name': CreateVM_Task, 'duration_secs': 0.506483} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.138040] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 764.139027] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.139027] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 764.139368] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 764.139585] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f0a93e7-645a-4d01-a906-05d5c92f15ad {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.144939] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Waiting for the task: (returnval){ [ 764.144939] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d82e83-bc97-be49-f3f3-9e8d07551908" [ 764.144939] env[69328]: _type = "Task" [ 764.144939] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.156068] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d82e83-bc97-be49-f3f3-9e8d07551908, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.286780] env[69328]: DEBUG oslo_vmware.api [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273145, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.339133] env[69328]: DEBUG nova.compute.manager [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 764.376131] env[69328]: DEBUG nova.virt.hardware [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 764.376434] env[69328]: DEBUG nova.virt.hardware [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 764.378878] env[69328]: DEBUG nova.virt.hardware [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 764.378878] env[69328]: DEBUG nova.virt.hardware [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 764.378878] env[69328]: DEBUG nova.virt.hardware [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 764.378878] env[69328]: DEBUG nova.virt.hardware [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 764.378878] env[69328]: DEBUG nova.virt.hardware [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 764.379372] env[69328]: DEBUG nova.virt.hardware [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 764.379372] env[69328]: DEBUG nova.virt.hardware [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f 
tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 764.379372] env[69328]: DEBUG nova.virt.hardware [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 764.379372] env[69328]: DEBUG nova.virt.hardware [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 764.379372] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe6a794c-d8b0-4877-a281-992b8a7b680a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.387863] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b03d8eb7-2366-4907-80f5-1b23413556b8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.514240] env[69328]: DEBUG nova.scheduler.client.report [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 764.564668] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 764.599489] env[69328]: DEBUG oslo_concurrency.lockutils [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "62fa6807-f67d-4bf5-ba23-9e97f9da120e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 764.599489] env[69328]: DEBUG oslo_concurrency.lockutils [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "62fa6807-f67d-4bf5-ba23-9e97f9da120e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 764.658388] 
env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d82e83-bc97-be49-f3f3-9e8d07551908, 'name': SearchDatastore_Task, 'duration_secs': 0.051472} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.658702] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 764.658945] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 764.659226] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.659421] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 764.659618] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 764.659878] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-73712bfe-5540-492b-8b59-fe11aaf81d6e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.669620] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 764.669765] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 764.676852] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f94dfc57-2fb0-4ae5-95ab-e9f9ec7f1b09 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.687538] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Waiting for the task: (returnval){ [ 764.687538] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f9115e-da54-1ad7-7892-880ca52ccd68" [ 764.687538] env[69328]: _type = "Task" [ 764.687538] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.695027] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f9115e-da54-1ad7-7892-880ca52ccd68, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.722443] env[69328]: DEBUG nova.network.neutron [req-391929bb-21f4-4b5d-b20a-2e1af2526985 req-84af340a-be11-4d51-a357-75cfa213b378 service nova] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Updated VIF entry in instance network info cache for port 7f5ae309-7210-4bdd-8fd5-67e654004662. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 764.722649] env[69328]: DEBUG nova.network.neutron [req-391929bb-21f4-4b5d-b20a-2e1af2526985 req-84af340a-be11-4d51-a357-75cfa213b378 service nova] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Updating instance_info_cache with network_info: [{"id": "7f5ae309-7210-4bdd-8fd5-67e654004662", "address": "fa:16:3e:3e:bd:14", "network": {"id": "b18f73ad-2ff8-4679-853a-f27ced0e60fd", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-906269769-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a019831c6ff240f3bfbf2c2bff104788", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dcf5c3f7-4e33-4f21-b323-3673930b789c", "external-id": "nsx-vlan-transportzone-983", "segmentation_id": 983, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f5ae309-72", "ovs_interfaceid": "7f5ae309-7210-4bdd-8fd5-67e654004662", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.787013] env[69328]: DEBUG oslo_vmware.api [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273145, 'name': ReconfigVM_Task, 'duration_secs': 1.247135} completed 
successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.787013] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653776', 'volume_id': '8d078387-725a-4917-a1c9-b494ee2ec127', 'name': 'volume-8d078387-725a-4917-a1c9-b494ee2ec127', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c465c53f-d96b-461b-b8ff-b19929b4f789', 'attached_at': '', 'detached_at': '', 'volume_id': '8d078387-725a-4917-a1c9-b494ee2ec127', 'serial': '8d078387-725a-4917-a1c9-b494ee2ec127'} {{(pid=69328) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 764.787013] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 764.788030] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86226938-f0ba-41a7-81ae-ff5e8a9d3a7b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.795480] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 764.796228] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a39568d8-a073-43d6-b09c-50c30edd1574 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.865157] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 764.865432] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 764.865694] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Deleting the datastore file [datastore1] c465c53f-d96b-461b-b8ff-b19929b4f789 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 764.866397] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c827d2f2-8ef6-4856-851f-8cbee57dba5e {{(pid=69328) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.875092] env[69328]: DEBUG oslo_vmware.api [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 764.875092] env[69328]: value = "task-3273148" [ 764.875092] env[69328]: _type = "Task" [ 764.875092] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.884834] env[69328]: DEBUG oslo_vmware.api [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273148, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.022156] env[69328]: DEBUG oslo_concurrency.lockutils [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.712s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 765.022732] env[69328]: DEBUG nova.compute.manager [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 765.025353] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.122s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 765.027201] env[69328]: INFO nova.compute.claims [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 765.200238] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f9115e-da54-1ad7-7892-880ca52ccd68, 'name': SearchDatastore_Task, 'duration_secs': 0.022518} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.203496] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3399dba-49d7-42f6-84fd-2e1a075030a2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.208021] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Waiting for the task: (returnval){ [ 765.208021] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]528da874-6da9-80d2-bb9a-8785c0832349" [ 765.208021] env[69328]: _type = "Task" [ 765.208021] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.217824] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]528da874-6da9-80d2-bb9a-8785c0832349, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.227317] env[69328]: DEBUG oslo_concurrency.lockutils [req-391929bb-21f4-4b5d-b20a-2e1af2526985 req-84af340a-be11-4d51-a357-75cfa213b378 service nova] Releasing lock "refresh_cache-690096cf-a0bd-4db1-ad97-8d8a37ad7c84" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 765.385263] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Acquiring lock "3daf7b73-5679-47ce-b847-f3786f1000d4" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 765.385263] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Lock "3daf7b73-5679-47ce-b847-f3786f1000d4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 765.396084] env[69328]: DEBUG oslo_vmware.api [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273148, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.228035} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.396084] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 765.396084] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 765.396084] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 765.396084] env[69328]: INFO nova.compute.manager [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Took 3.29 seconds to destroy the instance on the hypervisor. [ 765.396604] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 765.396604] env[69328]: DEBUG nova.compute.manager [-] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 765.396604] env[69328]: DEBUG nova.network.neutron [-] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 765.532812] env[69328]: DEBUG nova.compute.utils [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 765.537678] env[69328]: DEBUG nova.compute.manager [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 765.537678] env[69328]: DEBUG nova.network.neutron [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 765.631506] env[69328]: DEBUG nova.policy [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bd52a3d6da654c17a51bf22429f67401', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e20778e9fc1841d2b38e5456dedbf102', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 765.721387] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]528da874-6da9-80d2-bb9a-8785c0832349, 'name': SearchDatastore_Task, 'duration_secs': 0.011365} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.722209] env[69328]: DEBUG nova.network.neutron [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Successfully updated port: 90f7115d-cbd5-42dd-a07a-5eb45deb5276 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 765.723250] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 765.723517] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 690096cf-a0bd-4db1-ad97-8d8a37ad7c84/690096cf-a0bd-4db1-ad97-8d8a37ad7c84.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 765.724173] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-22c8ea78-0db0-450a-b044-5c56cb50e4fb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.732270] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Waiting for the task: (returnval){ [ 765.732270] 
env[69328]: value = "task-3273149" [ 765.732270] env[69328]: _type = "Task" [ 765.732270] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.742964] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273149, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.935644] env[69328]: DEBUG oslo_concurrency.lockutils [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Acquiring lock "d045c9ca-71f9-411e-9048-71b36c32f4b2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 765.935644] env[69328]: DEBUG oslo_concurrency.lockutils [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Lock "d045c9ca-71f9-411e-9048-71b36c32f4b2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 765.959654] env[69328]: DEBUG nova.compute.manager [req-8c6f3982-ee8e-4ceb-87a3-7e66f0082c5c req-e3ea56dd-613d-4a7f-9fd9-a33a5d1b2f69 service nova] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Received event network-vif-plugged-90f7115d-cbd5-42dd-a07a-5eb45deb5276 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 765.960084] env[69328]: DEBUG oslo_concurrency.lockutils [req-8c6f3982-ee8e-4ceb-87a3-7e66f0082c5c req-e3ea56dd-613d-4a7f-9fd9-a33a5d1b2f69 service nova] Acquiring lock "99e31dfd-5d41-4564-886f-becc25ca289c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 765.960439] env[69328]: DEBUG oslo_concurrency.lockutils [req-8c6f3982-ee8e-4ceb-87a3-7e66f0082c5c req-e3ea56dd-613d-4a7f-9fd9-a33a5d1b2f69 service nova] Lock "99e31dfd-5d41-4564-886f-becc25ca289c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 765.960728] env[69328]: DEBUG oslo_concurrency.lockutils [req-8c6f3982-ee8e-4ceb-87a3-7e66f0082c5c req-e3ea56dd-613d-4a7f-9fd9-a33a5d1b2f69 service nova] Lock "99e31dfd-5d41-4564-886f-becc25ca289c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 765.961026] env[69328]: DEBUG nova.compute.manager [req-8c6f3982-ee8e-4ceb-87a3-7e66f0082c5c req-e3ea56dd-613d-4a7f-9fd9-a33a5d1b2f69 service nova] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] No waiting events found dispatching network-vif-plugged-90f7115d-cbd5-42dd-a07a-5eb45deb5276 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 765.961295] env[69328]: WARNING nova.compute.manager 
[req-8c6f3982-ee8e-4ceb-87a3-7e66f0082c5c req-e3ea56dd-613d-4a7f-9fd9-a33a5d1b2f69 service nova] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Received unexpected event network-vif-plugged-90f7115d-cbd5-42dd-a07a-5eb45deb5276 for instance with vm_state building and task_state spawning. [ 766.038214] env[69328]: DEBUG nova.compute.manager [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 766.057947] env[69328]: DEBUG nova.network.neutron [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Successfully created port: d7451c82-01e6-4e9f-bfbc-e873dbf7896a {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 766.225249] env[69328]: DEBUG oslo_concurrency.lockutils [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Acquiring lock "refresh_cache-99e31dfd-5d41-4564-886f-becc25ca289c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.225249] env[69328]: DEBUG oslo_concurrency.lockutils [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Acquired lock "refresh_cache-99e31dfd-5d41-4564-886f-becc25ca289c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 766.225727] env[69328]: DEBUG nova.network.neutron [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 766.243996] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273149, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.264232] env[69328]: DEBUG nova.compute.manager [req-091d8057-ed25-43d1-a666-b3e29dd84e38 req-fd6e23b0-3175-4741-821f-cce41b4088cc service nova] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Received event network-vif-deleted-bdb8bb73-99e9-4ac7-95a7-50bf7fd362a7 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 766.264232] env[69328]: INFO nova.compute.manager [req-091d8057-ed25-43d1-a666-b3e29dd84e38 req-fd6e23b0-3175-4741-821f-cce41b4088cc service nova] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Neutron deleted interface bdb8bb73-99e9-4ac7-95a7-50bf7fd362a7; detaching it from the instance and deleting it from the info cache [ 766.264232] env[69328]: DEBUG nova.network.neutron [req-091d8057-ed25-43d1-a666-b3e29dd84e38 req-fd6e23b0-3175-4741-821f-cce41b4088cc service nova] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.312518] env[69328]: DEBUG nova.network.neutron [-] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.642313] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4facd950-d5d5-41f9-bea7-e24699897e8c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.660455] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfcebb91-7412-4417-976d-5738ee0be449 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.695884] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0204df04-8192-4cc7-a42a-4013de50c935 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.705053] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6c32609-639e-4cf4-ad19-80972beb9a31 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.720195] env[69328]: DEBUG nova.compute.provider_tree [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 766.744622] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273149, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.971553} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.744933] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 690096cf-a0bd-4db1-ad97-8d8a37ad7c84/690096cf-a0bd-4db1-ad97-8d8a37ad7c84.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 766.745171] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 766.745445] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b6f6c853-506b-4641-b14a-8fa3cca2cf09 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.754499] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Waiting for the task: (returnval){ [ 766.754499] env[69328]: value = "task-3273150" [ 766.754499] env[69328]: _type = "Task" [ 766.754499] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.764875] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273150, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.766681] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-55815f14-3fbc-47e9-8eb3-82221df8f03a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.778192] env[69328]: DEBUG nova.network.neutron [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 766.783046] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd73f8b8-8eae-403c-bd75-734cc24f43e8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.818948] env[69328]: INFO nova.compute.manager [-] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Took 1.42 seconds to deallocate network for instance. [ 766.819345] env[69328]: DEBUG nova.compute.manager [req-091d8057-ed25-43d1-a666-b3e29dd84e38 req-fd6e23b0-3175-4741-821f-cce41b4088cc service nova] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Detach interface failed, port_id=bdb8bb73-99e9-4ac7-95a7-50bf7fd362a7, reason: Instance c465c53f-d96b-461b-b8ff-b19929b4f789 could not be found. 
{{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 766.959264] env[69328]: DEBUG nova.network.neutron [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Updating instance_info_cache with network_info: [{"id": "90f7115d-cbd5-42dd-a07a-5eb45deb5276", "address": "fa:16:3e:22:47:ec", "network": {"id": "032910e6-4d2e-415b-ac3e-ed7a7fadf536", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1432969230-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "690511a8725a4dd6ab796a15569293a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "be5c038c-29e5-43c9-91ab-9eb3094b5337", "external-id": "nsx-vlan-transportzone-511", "segmentation_id": 511, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90f7115d-cb", "ovs_interfaceid": "90f7115d-cbd5-42dd-a07a-5eb45deb5276", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.058206] env[69328]: DEBUG nova.compute.manager [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 767.083975] env[69328]: DEBUG nova.virt.hardware [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 767.084284] env[69328]: DEBUG nova.virt.hardware [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 767.084460] env[69328]: DEBUG nova.virt.hardware [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 767.084758] env[69328]: DEBUG nova.virt.hardware [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 767.084931] env[69328]: DEBUG nova.virt.hardware [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 767.085094] env[69328]: DEBUG nova.virt.hardware [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 767.085309] env[69328]: DEBUG nova.virt.hardware [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 767.085465] env[69328]: DEBUG nova.virt.hardware [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 767.085627] env[69328]: DEBUG nova.virt.hardware [None 
req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 767.085783] env[69328]: DEBUG nova.virt.hardware [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 767.085979] env[69328]: DEBUG nova.virt.hardware [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 767.086857] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-921b5230-2791-4b43-8d83-1ab13d27efc9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.095576] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-322d6695-55f3-4fbb-9782-fb3fc8fad2f6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.223174] env[69328]: DEBUG nova.scheduler.client.report [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 767.264564] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273150, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.373063} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.264924] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 767.265564] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34bdd06c-f912-46a4-9c03-5acf8a0ce46b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.279821] env[69328]: DEBUG oslo_concurrency.lockutils [None req-dff6540d-b8fa-4825-b027-b626caa9df94 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Acquiring lock "d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 767.280067] env[69328]: DEBUG oslo_concurrency.lockutils [None req-dff6540d-b8fa-4825-b027-b626caa9df94 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Lock "d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 767.280275] env[69328]: DEBUG oslo_concurrency.lockutils [None req-dff6540d-b8fa-4825-b027-b626caa9df94 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Acquiring lock "d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 767.280458] env[69328]: DEBUG oslo_concurrency.lockutils [None req-dff6540d-b8fa-4825-b027-b626caa9df94 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Lock "d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 767.280621] env[69328]: DEBUG oslo_concurrency.lockutils [None req-dff6540d-b8fa-4825-b027-b626caa9df94 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Lock "d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 767.290560] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Reconfiguring VM instance instance-00000029 to attach disk [datastore2] 690096cf-a0bd-4db1-ad97-8d8a37ad7c84/690096cf-a0bd-4db1-ad97-8d8a37ad7c84.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm 
/opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 767.291260] env[69328]: INFO nova.compute.manager [None req-dff6540d-b8fa-4825-b027-b626caa9df94 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Terminating instance [ 767.292469] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cb39d2d7-07ff-4ba0-b7e2-a1e64c5f514b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.313689] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Waiting for the task: (returnval){ [ 767.313689] env[69328]: value = "task-3273151" [ 767.313689] env[69328]: _type = "Task" [ 767.313689] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.322634] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273151, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.375853] env[69328]: INFO nova.compute.manager [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Took 0.56 seconds to detach 1 volumes for instance. [ 767.462148] env[69328]: DEBUG oslo_concurrency.lockutils [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Releasing lock "refresh_cache-99e31dfd-5d41-4564-886f-becc25ca289c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 767.462392] env[69328]: DEBUG nova.compute.manager [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Instance network_info: |[{"id": "90f7115d-cbd5-42dd-a07a-5eb45deb5276", "address": "fa:16:3e:22:47:ec", "network": {"id": "032910e6-4d2e-415b-ac3e-ed7a7fadf536", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1432969230-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "690511a8725a4dd6ab796a15569293a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "be5c038c-29e5-43c9-91ab-9eb3094b5337", "external-id": "nsx-vlan-transportzone-511", "segmentation_id": 511, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90f7115d-cb", "ovs_interfaceid": "90f7115d-cbd5-42dd-a07a-5eb45deb5276", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 767.462831] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:22:47:ec', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'be5c038c-29e5-43c9-91ab-9eb3094b5337', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '90f7115d-cbd5-42dd-a07a-5eb45deb5276', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 767.471306] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 767.471538] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 767.471782] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9b0aa344-9d67-412f-afd5-1524f499a6df {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.500753] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 767.500753] env[69328]: value = "task-3273152" [ 767.500753] env[69328]: _type = "Task" [ 767.500753] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.508516] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273152, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.695453] env[69328]: DEBUG nova.network.neutron [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Successfully updated port: d7451c82-01e6-4e9f-bfbc-e873dbf7896a {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 767.729071] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.704s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 767.730021] env[69328]: DEBUG nova.compute.manager [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 767.732366] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3dfbb600-41b4-427f-be91-f569b0800126 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.143s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 767.732567] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3dfbb600-41b4-427f-be91-f569b0800126 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 767.734643] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6f37e08b-f51f-46fa-b487-d83cc96036f5 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.486s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 767.734825] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6f37e08b-f51f-46fa-b487-d83cc96036f5 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 767.736502] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.962s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 767.737879] env[69328]: INFO nova.compute.claims [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 767.765880] env[69328]: INFO nova.scheduler.client.report [None req-6f37e08b-f51f-46fa-b487-d83cc96036f5 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Deleted allocations for instance 146a3eef-0971-4f6e-bd24-58b38a1de0ed [ 767.767616] env[69328]: INFO nova.scheduler.client.report [None req-3dfbb600-41b4-427f-be91-f569b0800126 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Deleted allocations for instance 1e7e9e6e-c084-480c-8653-8441c13d7514 [ 767.809114] env[69328]: DEBUG nova.compute.manager [None req-dff6540d-b8fa-4825-b027-b626caa9df94 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 767.809359] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-dff6540d-b8fa-4825-b027-b626caa9df94 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 767.810287] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f492d209-a4d0-4c3f-a110-a1dc23a07dde {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.820216] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-dff6540d-b8fa-4825-b027-b626caa9df94 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 767.820773] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e2255c6e-f266-4ab1-a462-3c43867204d3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.825385] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273151, 'name': ReconfigVM_Task, 'duration_secs': 0.287558} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.825623] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Reconfigured VM instance instance-00000029 to attach disk [datastore2] 690096cf-a0bd-4db1-ad97-8d8a37ad7c84/690096cf-a0bd-4db1-ad97-8d8a37ad7c84.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 767.827141] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7736863c-ca67-4488-a691-ab3c989911bf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.828738] env[69328]: DEBUG oslo_vmware.api [None req-dff6540d-b8fa-4825-b027-b626caa9df94 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Waiting for the task: (returnval){ [ 767.828738] env[69328]: value = "task-3273153" [ 767.828738] env[69328]: _type = "Task" [ 767.828738] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.833485] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Waiting for the task: (returnval){ [ 767.833485] env[69328]: value = "task-3273154" [ 767.833485] env[69328]: _type = "Task" [ 767.833485] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.840948] env[69328]: DEBUG oslo_vmware.api [None req-dff6540d-b8fa-4825-b027-b626caa9df94 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': task-3273153, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.849223] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273154, 'name': Rename_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.882628] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 768.010882] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273152, 'name': CreateVM_Task} progress is 25%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.031133] env[69328]: DEBUG nova.compute.manager [req-5d6769f6-ce61-4f6a-a2ae-1ab3026829f7 req-b1f91c20-95d4-4bea-bcb0-39791c05c610 service nova] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Received event network-changed-90f7115d-cbd5-42dd-a07a-5eb45deb5276 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 768.031303] env[69328]: DEBUG nova.compute.manager [req-5d6769f6-ce61-4f6a-a2ae-1ab3026829f7 req-b1f91c20-95d4-4bea-bcb0-39791c05c610 service nova] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Refreshing instance network info cache due to event network-changed-90f7115d-cbd5-42dd-a07a-5eb45deb5276. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 768.032110] env[69328]: DEBUG oslo_concurrency.lockutils [req-5d6769f6-ce61-4f6a-a2ae-1ab3026829f7 req-b1f91c20-95d4-4bea-bcb0-39791c05c610 service nova] Acquiring lock "refresh_cache-99e31dfd-5d41-4564-886f-becc25ca289c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.032110] env[69328]: DEBUG oslo_concurrency.lockutils [req-5d6769f6-ce61-4f6a-a2ae-1ab3026829f7 req-b1f91c20-95d4-4bea-bcb0-39791c05c610 service nova] Acquired lock "refresh_cache-99e31dfd-5d41-4564-886f-becc25ca289c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 768.032110] env[69328]: DEBUG nova.network.neutron [req-5d6769f6-ce61-4f6a-a2ae-1ab3026829f7 req-b1f91c20-95d4-4bea-bcb0-39791c05c610 service nova] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Refreshing network info cache for port 90f7115d-cbd5-42dd-a07a-5eb45deb5276 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 768.199053] env[69328]: DEBUG oslo_concurrency.lockutils [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Acquiring lock "refresh_cache-d724a141-35e7-4483-99aa-8a17066fb63b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.199053] env[69328]: DEBUG oslo_concurrency.lockutils [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Acquired lock "refresh_cache-d724a141-35e7-4483-99aa-8a17066fb63b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 768.199357] env[69328]: DEBUG nova.network.neutron [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 768.242885] env[69328]: DEBUG nova.compute.utils [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 768.246478] env[69328]: DEBUG nova.compute.manager [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 768.246613] env[69328]: DEBUG nova.network.neutron [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 768.282504] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6f37e08b-f51f-46fa-b487-d83cc96036f5 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lock "146a3eef-0971-4f6e-bd24-58b38a1de0ed" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.782s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 768.283677] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3dfbb600-41b4-427f-be91-f569b0800126 tempest-MultipleCreateTestJSON-1150672461 tempest-MultipleCreateTestJSON-1150672461-project-member] Lock "1e7e9e6e-c084-480c-8653-8441c13d7514" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.212s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 768.328961] env[69328]: DEBUG nova.compute.manager [req-5349c41b-028e-440e-b2de-536142a20697 req-14de99cb-d7a1-4e24-92d9-db9fe25a33e2 service nova] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Received event network-vif-plugged-d7451c82-01e6-4e9f-bfbc-e873dbf7896a {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 768.329229] env[69328]: DEBUG oslo_concurrency.lockutils [req-5349c41b-028e-440e-b2de-536142a20697 req-14de99cb-d7a1-4e24-92d9-db9fe25a33e2 service nova] Acquiring lock "d724a141-35e7-4483-99aa-8a17066fb63b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 768.329531] env[69328]: DEBUG oslo_concurrency.lockutils [req-5349c41b-028e-440e-b2de-536142a20697 req-14de99cb-d7a1-4e24-92d9-db9fe25a33e2 service nova] Lock "d724a141-35e7-4483-99aa-8a17066fb63b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 768.329742] env[69328]: DEBUG oslo_concurrency.lockutils [req-5349c41b-028e-440e-b2de-536142a20697 req-14de99cb-d7a1-4e24-92d9-db9fe25a33e2 service nova] Lock "d724a141-35e7-4483-99aa-8a17066fb63b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 768.329948] env[69328]: DEBUG nova.compute.manager [req-5349c41b-028e-440e-b2de-536142a20697 req-14de99cb-d7a1-4e24-92d9-db9fe25a33e2 service nova] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] No waiting events found dispatching network-vif-plugged-d7451c82-01e6-4e9f-bfbc-e873dbf7896a {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 768.330157] env[69328]: WARNING nova.compute.manager [req-5349c41b-028e-440e-b2de-536142a20697 req-14de99cb-d7a1-4e24-92d9-db9fe25a33e2 service nova] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Received unexpected event 
network-vif-plugged-d7451c82-01e6-4e9f-bfbc-e873dbf7896a for instance with vm_state building and task_state spawning. [ 768.330321] env[69328]: DEBUG nova.compute.manager [req-5349c41b-028e-440e-b2de-536142a20697 req-14de99cb-d7a1-4e24-92d9-db9fe25a33e2 service nova] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Received event network-changed-d7451c82-01e6-4e9f-bfbc-e873dbf7896a {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 768.330610] env[69328]: DEBUG nova.compute.manager [req-5349c41b-028e-440e-b2de-536142a20697 req-14de99cb-d7a1-4e24-92d9-db9fe25a33e2 service nova] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Refreshing instance network info cache due to event network-changed-d7451c82-01e6-4e9f-bfbc-e873dbf7896a. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 768.330677] env[69328]: DEBUG oslo_concurrency.lockutils [req-5349c41b-028e-440e-b2de-536142a20697 req-14de99cb-d7a1-4e24-92d9-db9fe25a33e2 service nova] Acquiring lock "refresh_cache-d724a141-35e7-4483-99aa-8a17066fb63b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.340580] env[69328]: DEBUG nova.policy [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dd0c2e2c547e4da4a3c5dffe87337c02', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0ef63e916e324066a8feacfe8a4b6358', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 768.346777] env[69328]: DEBUG oslo_vmware.api [None req-dff6540d-b8fa-4825-b027-b626caa9df94 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': task-3273153, 'name': PowerOffVM_Task, 'duration_secs': 0.513489} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.347056] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-dff6540d-b8fa-4825-b027-b626caa9df94 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 768.347356] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-dff6540d-b8fa-4825-b027-b626caa9df94 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 768.347620] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a20c695c-590c-4fd5-a08b-bdfe41167029 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.353430] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273154, 'name': Rename_Task, 'duration_secs': 0.151816} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.353430] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 768.353430] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6bca9203-5580-4cb4-8a77-109d506671f4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.361846] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Waiting for the task: (returnval){ [ 768.361846] env[69328]: value = "task-3273156" [ 768.361846] env[69328]: _type = "Task" [ 768.361846] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.370875] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273156, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.432562] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-dff6540d-b8fa-4825-b027-b626caa9df94 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 768.432822] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-dff6540d-b8fa-4825-b027-b626caa9df94 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 768.433178] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-dff6540d-b8fa-4825-b027-b626caa9df94 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Deleting the datastore file [datastore1] d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 768.433265] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9aaa8784-5230-4aee-af6b-598a5d7fe952 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.443593] env[69328]: DEBUG oslo_vmware.api [None req-dff6540d-b8fa-4825-b027-b626caa9df94 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Waiting for the task: (returnval){ [ 768.443593] env[69328]: value = "task-3273157" [ 768.443593] env[69328]: _type = "Task" [ 768.443593] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.458406] env[69328]: DEBUG oslo_vmware.api [None req-dff6540d-b8fa-4825-b027-b626caa9df94 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': task-3273157, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.512800] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273152, 'name': CreateVM_Task, 'duration_secs': 0.971854} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.512970] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 768.514324] env[69328]: DEBUG oslo_concurrency.lockutils [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.514882] env[69328]: DEBUG oslo_concurrency.lockutils [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 768.514882] env[69328]: DEBUG oslo_concurrency.lockutils [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 768.515099] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ae65c01-f471-4f92-91e1-25aeea472918 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.521073] env[69328]: DEBUG oslo_vmware.api [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for the task: (returnval){ [ 768.521073] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f427a9-7695-d61d-9a99-d95520af9984" [ 768.521073] env[69328]: _type = "Task" [ 768.521073] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.531895] env[69328]: DEBUG oslo_vmware.api [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f427a9-7695-d61d-9a99-d95520af9984, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.747507] env[69328]: DEBUG nova.compute.manager [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 768.755893] env[69328]: DEBUG nova.network.neutron [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 768.875369] env[69328]: DEBUG oslo_vmware.api [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273156, 'name': PowerOnVM_Task, 'duration_secs': 0.512257} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.878121] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 768.878341] env[69328]: INFO nova.compute.manager [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Took 8.23 seconds to spawn the instance on the hypervisor. [ 768.878522] env[69328]: DEBUG nova.compute.manager [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 768.879388] env[69328]: DEBUG nova.network.neutron [req-5d6769f6-ce61-4f6a-a2ae-1ab3026829f7 req-b1f91c20-95d4-4bea-bcb0-39791c05c610 service nova] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Updated VIF entry in instance network info cache for port 90f7115d-cbd5-42dd-a07a-5eb45deb5276. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 768.879711] env[69328]: DEBUG nova.network.neutron [req-5d6769f6-ce61-4f6a-a2ae-1ab3026829f7 req-b1f91c20-95d4-4bea-bcb0-39791c05c610 service nova] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Updating instance_info_cache with network_info: [{"id": "90f7115d-cbd5-42dd-a07a-5eb45deb5276", "address": "fa:16:3e:22:47:ec", "network": {"id": "032910e6-4d2e-415b-ac3e-ed7a7fadf536", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1432969230-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "690511a8725a4dd6ab796a15569293a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "be5c038c-29e5-43c9-91ab-9eb3094b5337", "external-id": "nsx-vlan-transportzone-511", "segmentation_id": 511, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90f7115d-cb", "ovs_interfaceid": "90f7115d-cbd5-42dd-a07a-5eb45deb5276", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.882619] env[69328]: DEBUG nova.network.neutron [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Successfully created port: fbe60697-372d-45c9-97c0-49ce01cbc064 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 768.884685] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f2c988c-67a2-41dd-877b-8223391c6af6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.954596] env[69328]: DEBUG oslo_vmware.api [None req-dff6540d-b8fa-4825-b027-b626caa9df94 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Task: {'id': task-3273157, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.23442} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.954855] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-dff6540d-b8fa-4825-b027-b626caa9df94 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 768.955053] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-dff6540d-b8fa-4825-b027-b626caa9df94 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 768.955244] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-dff6540d-b8fa-4825-b027-b626caa9df94 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 768.955413] env[69328]: INFO nova.compute.manager [None req-dff6540d-b8fa-4825-b027-b626caa9df94 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Took 1.15 seconds to destroy the instance on the hypervisor. [ 768.955647] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dff6540d-b8fa-4825-b027-b626caa9df94 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 768.955831] env[69328]: DEBUG nova.compute.manager [-] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 768.955924] env[69328]: DEBUG nova.network.neutron [-] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 769.010143] env[69328]: DEBUG nova.network.neutron [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Updating instance_info_cache with network_info: [{"id": "d7451c82-01e6-4e9f-bfbc-e873dbf7896a", "address": "fa:16:3e:2d:34:4e", "network": {"id": "f37f62db-d4a4-4c8a-a9e0-eac14a2262cf", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1593605925-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e20778e9fc1841d2b38e5456dedbf102", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd7451c82-01", "ovs_interfaceid": "d7451c82-01e6-4e9f-bfbc-e873dbf7896a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 769.033171] env[69328]: DEBUG oslo_vmware.api [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f427a9-7695-d61d-9a99-d95520af9984, 'name': SearchDatastore_Task, 'duration_secs': 0.010213} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.033480] env[69328]: DEBUG oslo_concurrency.lockutils [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 769.033712] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 769.033981] env[69328]: DEBUG oslo_concurrency.lockutils [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.034114] env[69328]: DEBUG oslo_concurrency.lockutils [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 769.034303] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 769.034948] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-34d0d466-8420-4924-be95-bceb3a106eb9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.044523] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 769.044705] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 769.045538] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d49c313-ac7d-4a99-a5f5-2a979e612d01 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.052986] env[69328]: DEBUG oslo_vmware.api [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for the task: (returnval){ [ 769.052986] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]521a7c96-a216-dfdb-336b-3dc8b4bcffdc" [ 769.052986] env[69328]: _type = "Task" [ 769.052986] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.061426] env[69328]: DEBUG oslo_vmware.api [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]521a7c96-a216-dfdb-336b-3dc8b4bcffdc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.259719] env[69328]: INFO nova.virt.block_device [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Booting with volume aa5050fe-f367-4822-9aa7-4bfac9106402 at /dev/sda [ 769.284862] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34ee2c06-befc-461c-a9c6-aa5d6a637667 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.298250] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-991adda1-761c-429b-ba0a-b77ab0ac73f8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.303172] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1e11e49f-6d60-492e-81e0-2f0f3f4c5313 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.341521] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea6406a4-e984-436f-81a3-96d1c260876d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.348543] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe3b84a8-d4b1-46c6-9f08-9a4c359b7239 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.363975] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91e160eb-36af-4f7f-94e1-fc710211796d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.378267] env[69328]: DEBUG nova.compute.provider_tree [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Inventory has not changed in ProviderTree for provider: 
149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 769.390774] env[69328]: DEBUG oslo_concurrency.lockutils [req-5d6769f6-ce61-4f6a-a2ae-1ab3026829f7 req-b1f91c20-95d4-4bea-bcb0-39791c05c610 service nova] Releasing lock "refresh_cache-99e31dfd-5d41-4564-886f-becc25ca289c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 769.392091] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7ff33c96-f7de-4e42-afde-1010ad35cd9e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.401218] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e917e22-cd88-400f-b2a8-797f35f43ba5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.419835] env[69328]: INFO nova.compute.manager [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Took 55.80 seconds to build instance. [ 769.439866] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1182ec70-e769-40ba-a7a2-bd5e6ec207e3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.448901] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5453f866-7bb3-433b-9ed3-0f42c5d7f441 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.467246] env[69328]: DEBUG nova.virt.block_device [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Updating existing volume attachment record: 1161ad5a-2471-45ff-8f54-a5fecc883515 {{(pid=69328) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 769.514131] env[69328]: DEBUG oslo_concurrency.lockutils [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Releasing lock "refresh_cache-d724a141-35e7-4483-99aa-8a17066fb63b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 769.514131] env[69328]: DEBUG nova.compute.manager [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Instance network_info: |[{"id": "d7451c82-01e6-4e9f-bfbc-e873dbf7896a", "address": "fa:16:3e:2d:34:4e", "network": {"id": "f37f62db-d4a4-4c8a-a9e0-eac14a2262cf", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1593605925-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e20778e9fc1841d2b38e5456dedbf102", "mtu": 8950, "physical_network": "default", 
"tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd7451c82-01", "ovs_interfaceid": "d7451c82-01e6-4e9f-bfbc-e873dbf7896a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 769.514377] env[69328]: DEBUG oslo_concurrency.lockutils [req-5349c41b-028e-440e-b2de-536142a20697 req-14de99cb-d7a1-4e24-92d9-db9fe25a33e2 service nova] Acquired lock "refresh_cache-d724a141-35e7-4483-99aa-8a17066fb63b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 769.514377] env[69328]: DEBUG nova.network.neutron [req-5349c41b-028e-440e-b2de-536142a20697 req-14de99cb-d7a1-4e24-92d9-db9fe25a33e2 service nova] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Refreshing network info cache for port d7451c82-01e6-4e9f-bfbc-e873dbf7896a {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 769.515506] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2d:34:4e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1e7a4976-597e-4636-990e-6062b5faadee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd7451c82-01e6-4e9f-bfbc-e873dbf7896a', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 769.524771] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Creating folder: Project (e20778e9fc1841d2b38e5456dedbf102). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 769.525607] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8b0a6178-30ad-453b-8d1f-a491634ac798 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.538212] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Created folder: Project (e20778e9fc1841d2b38e5456dedbf102) in parent group-v653649. [ 769.538440] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Creating folder: Instances. Parent ref: group-v653779. 
{{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 769.538732] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0373c9e5-61d3-468c-aaba-d1576cae57d7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.548571] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Created folder: Instances in parent group-v653779. [ 769.548865] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 769.549120] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 769.549384] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-88697fc0-f6db-4408-90ef-4850f0c87f9c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.574862] env[69328]: DEBUG oslo_vmware.api [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]521a7c96-a216-dfdb-336b-3dc8b4bcffdc, 'name': SearchDatastore_Task, 'duration_secs': 0.044111} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.576907] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 769.576907] env[69328]: value = "task-3273160" [ 769.576907] env[69328]: _type = "Task" [ 769.576907] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.577510] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c6d0dd5-cf96-4228-b181-61f5c107bc25 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.586355] env[69328]: DEBUG oslo_vmware.api [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for the task: (returnval){ [ 769.586355] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]524bf1f2-9adc-0f79-d28c-897f9e9d1416" [ 769.586355] env[69328]: _type = "Task" [ 769.586355] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.598221] env[69328]: DEBUG oslo_vmware.api [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]524bf1f2-9adc-0f79-d28c-897f9e9d1416, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.881484] env[69328]: DEBUG nova.scheduler.client.report [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 769.920338] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e5b8f03-67f8-4192-833f-ac19429cab8d tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Lock "690096cf-a0bd-4db1-ad97-8d8a37ad7c84" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 94.896s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 770.090810] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273160, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.100361] env[69328]: DEBUG oslo_vmware.api [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]524bf1f2-9adc-0f79-d28c-897f9e9d1416, 'name': SearchDatastore_Task, 'duration_secs': 0.014129} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.100622] env[69328]: DEBUG oslo_concurrency.lockutils [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 770.101467] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 99e31dfd-5d41-4564-886f-becc25ca289c/99e31dfd-5d41-4564-886f-becc25ca289c.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 770.101467] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4b86b134-e781-4a73-99af-73a518e3f911 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.106835] env[69328]: DEBUG oslo_vmware.api [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for the task: (returnval){ [ 770.106835] env[69328]: value = "task-3273161" [ 770.106835] env[69328]: _type = "Task" [ 770.106835] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.115035] env[69328]: DEBUG nova.network.neutron [-] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 770.120315] env[69328]: DEBUG oslo_vmware.api [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3273161, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.295308] env[69328]: DEBUG nova.network.neutron [req-5349c41b-028e-440e-b2de-536142a20697 req-14de99cb-d7a1-4e24-92d9-db9fe25a33e2 service nova] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Updated VIF entry in instance network info cache for port d7451c82-01e6-4e9f-bfbc-e873dbf7896a. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 770.295308] env[69328]: DEBUG nova.network.neutron [req-5349c41b-028e-440e-b2de-536142a20697 req-14de99cb-d7a1-4e24-92d9-db9fe25a33e2 service nova] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Updating instance_info_cache with network_info: [{"id": "d7451c82-01e6-4e9f-bfbc-e873dbf7896a", "address": "fa:16:3e:2d:34:4e", "network": {"id": "f37f62db-d4a4-4c8a-a9e0-eac14a2262cf", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1593605925-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e20778e9fc1841d2b38e5456dedbf102", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd7451c82-01", "ovs_interfaceid": "d7451c82-01e6-4e9f-bfbc-e873dbf7896a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 770.389760] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.653s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 770.390651] env[69328]: DEBUG nova.compute.manager [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 770.398946] env[69328]: DEBUG oslo_concurrency.lockutils [None req-76f2b362-e0df-4234-aef2-d298bd07cb45 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.099s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 770.399246] env[69328]: DEBUG oslo_concurrency.lockutils [None req-76f2b362-e0df-4234-aef2-d298bd07cb45 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 770.403187] env[69328]: DEBUG oslo_concurrency.lockutils [None req-af1ea56b-1ba2-47fb-9021-e84f5b1f1c5b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.102s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 770.403187] env[69328]: DEBUG oslo_concurrency.lockutils [None req-af1ea56b-1ba2-47fb-9021-e84f5b1f1c5b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 770.405751] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f9e274ee-1c49-4fd0-91b5-055e0e13c669 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.025s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 770.405751] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f9e274ee-1c49-4fd0-91b5-055e0e13c669 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 770.405751] env[69328]: DEBUG oslo_concurrency.lockutils [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.538s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 770.407442] env[69328]: INFO nova.compute.claims [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 770.425923] env[69328]: DEBUG nova.compute.manager [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf 
tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 770.440175] env[69328]: INFO nova.scheduler.client.report [None req-af1ea56b-1ba2-47fb-9021-e84f5b1f1c5b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Deleted allocations for instance e5d3df12-5334-44c8-9a44-1674e57918bb [ 770.449845] env[69328]: INFO nova.scheduler.client.report [None req-f9e274ee-1c49-4fd0-91b5-055e0e13c669 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Deleted allocations for instance 46526210-2783-408d-9ecb-773f33ff0c66 [ 770.488289] env[69328]: DEBUG nova.compute.manager [req-aa347c6f-a029-4f01-9aee-966caa501526 req-a8a29146-0deb-426d-b2a6-30ef34880a4a service nova] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Received event network-vif-deleted-09c4fb65-f87f-4fdc-9a85-cf73224a3ca3 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 770.493463] env[69328]: INFO nova.scheduler.client.report [None req-76f2b362-e0df-4234-aef2-d298bd07cb45 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Deleted allocations for instance 4c54c0dd-32f1-4d35-b770-3e1a540c54a7 [ 770.594389] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273160, 'name': CreateVM_Task, 'duration_secs': 0.532241} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.594389] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 770.594389] env[69328]: DEBUG oslo_concurrency.lockutils [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.594389] env[69328]: DEBUG oslo_concurrency.lockutils [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 770.594885] env[69328]: DEBUG oslo_concurrency.lockutils [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 770.596554] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31a1ae52-5b2c-42bf-bb20-ab4f4e0c6a45 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.603144] env[69328]: DEBUG oslo_vmware.api [None req-18849c5c-7c4e-4186-8068-b389720e93a7 
tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Waiting for the task: (returnval){ [ 770.603144] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52270e43-59ea-ff3b-4fdb-89b170176b45" [ 770.603144] env[69328]: _type = "Task" [ 770.603144] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.614945] env[69328]: DEBUG oslo_vmware.api [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52270e43-59ea-ff3b-4fdb-89b170176b45, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.619448] env[69328]: INFO nova.compute.manager [-] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Took 1.66 seconds to deallocate network for instance. [ 770.619545] env[69328]: DEBUG oslo_vmware.api [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3273161, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.798438] env[69328]: DEBUG oslo_concurrency.lockutils [req-5349c41b-028e-440e-b2de-536142a20697 req-14de99cb-d7a1-4e24-92d9-db9fe25a33e2 service nova] Releasing lock "refresh_cache-d724a141-35e7-4483-99aa-8a17066fb63b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 770.890658] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cce60ede-7212-4a64-a3f5-72aa1be1edc1 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Acquiring lock "5292b759-9d1f-486a-b4d6-90519b3ae986" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 770.890658] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cce60ede-7212-4a64-a3f5-72aa1be1edc1 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Lock "5292b759-9d1f-486a-b4d6-90519b3ae986" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 770.890658] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cce60ede-7212-4a64-a3f5-72aa1be1edc1 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Acquiring lock "5292b759-9d1f-486a-b4d6-90519b3ae986-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 770.890658] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cce60ede-7212-4a64-a3f5-72aa1be1edc1 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Lock "5292b759-9d1f-486a-b4d6-90519b3ae986-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 770.891068] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cce60ede-7212-4a64-a3f5-72aa1be1edc1 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Lock "5292b759-9d1f-486a-b4d6-90519b3ae986-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 770.895966] env[69328]: INFO nova.compute.manager [None req-cce60ede-7212-4a64-a3f5-72aa1be1edc1 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Terminating instance [ 770.911675] env[69328]: DEBUG nova.compute.utils [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 770.912890] env[69328]: DEBUG nova.compute.manager [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Not allocating networking since 'none' was specified. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 770.951846] env[69328]: DEBUG oslo_concurrency.lockutils [None req-af1ea56b-1ba2-47fb-9021-e84f5b1f1c5b tempest-ImagesNegativeTestJSON-905395821 tempest-ImagesNegativeTestJSON-905395821-project-member] Lock "e5d3df12-5334-44c8-9a44-1674e57918bb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.359s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 770.953876] env[69328]: DEBUG nova.network.neutron [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Successfully updated port: fbe60697-372d-45c9-97c0-49ce01cbc064 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 770.960817] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f9e274ee-1c49-4fd0-91b5-055e0e13c669 tempest-AttachInterfacesUnderV243Test-18127939 tempest-AttachInterfacesUnderV243Test-18127939-project-member] Lock "46526210-2783-408d-9ecb-773f33ff0c66" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.513s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 770.961947] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 771.001972] env[69328]: DEBUG oslo_concurrency.lockutils [None req-76f2b362-e0df-4234-aef2-d298bd07cb45 tempest-DeleteServersAdminTestJSON-1390927168 tempest-DeleteServersAdminTestJSON-1390927168-project-member] Lock "4c54c0dd-32f1-4d35-b770-3e1a540c54a7" "released" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.506s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 771.119067] env[69328]: DEBUG oslo_vmware.api [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52270e43-59ea-ff3b-4fdb-89b170176b45, 'name': SearchDatastore_Task, 'duration_secs': 0.023187} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.121665] env[69328]: DEBUG oslo_concurrency.lockutils [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 771.121901] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 771.122156] env[69328]: DEBUG oslo_concurrency.lockutils [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 771.122299] env[69328]: DEBUG oslo_concurrency.lockutils [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 771.123263] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 771.123263] env[69328]: DEBUG oslo_vmware.api [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3273161, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.55183} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.123263] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-38177087-900e-4a8e-9955-f31619f41ac4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.124835] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 99e31dfd-5d41-4564-886f-becc25ca289c/99e31dfd-5d41-4564-886f-becc25ca289c.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 771.125049] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 771.125383] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0734fbbe-f008-4725-ab2c-8c2f3f06f256 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.128228] env[69328]: DEBUG oslo_concurrency.lockutils [None req-dff6540d-b8fa-4825-b027-b626caa9df94 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 771.134030] env[69328]: DEBUG oslo_vmware.api [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for the task: (returnval){ [ 771.134030] env[69328]: value = "task-3273162" [ 771.134030] env[69328]: _type = "Task" [ 771.134030] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.134251] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 771.134386] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Folder [datastore1] devstack-image-cache_base created. 
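The Acquiring/Acquired/Releasing lock lines around the devstack-image-cache_base entries above are oslo.concurrency's lockutils at work: work on the cached VMDK is serialized on a lock named after its datastore path. A minimal sketch of that usage, assuming oslo.concurrency is installed; the function names are illustrative, only the lock names are taken from the log:

    from oslo_concurrency import lockutils

    # Lock name copied from the log above; any writer touching the cached VMDK
    # takes this lock first, so only one copy/extend runs at a time.
    CACHE_VMDK_LOCK = ("[datastore1] devstack-image-cache_base/"
                       "a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/"
                       "a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk")

    def copy_cached_image(copy_fn):
        # Context-manager form: "Acquiring"/"Acquired" are logged on entry,
        # "Releasing" on exit.
        with lockutils.lock(CACHE_VMDK_LOCK):
            copy_fn()

    @lockutils.synchronized("compute_resources")
    def claim_resources():
        # Decorator form, matching the "compute_resources" lock taken by the
        # resource tracker elsewhere in this log.
        pass
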
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 771.138510] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-550f45dc-824d-4caf-ae11-8f5531ea99ed {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.149022] env[69328]: DEBUG oslo_vmware.api [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3273162, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.149022] env[69328]: DEBUG oslo_vmware.api [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Waiting for the task: (returnval){ [ 771.149022] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52184ca0-ef74-fa47-5dd6-209f397a6f51" [ 771.149022] env[69328]: _type = "Task" [ 771.149022] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.155833] env[69328]: DEBUG oslo_vmware.api [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52184ca0-ef74-fa47-5dd6-209f397a6f51, 'name': SearchDatastore_Task} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.156599] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3bbc6631-9bc1-44bd-831c-e8d27518e5db {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.162352] env[69328]: DEBUG oslo_vmware.api [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Waiting for the task: (returnval){ [ 771.162352] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]521e6c27-884a-ad1b-e2be-8e59375b0e71" [ 771.162352] env[69328]: _type = "Task" [ 771.162352] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.170663] env[69328]: DEBUG oslo_vmware.api [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]521e6c27-884a-ad1b-e2be-8e59375b0e71, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.401286] env[69328]: DEBUG nova.compute.manager [None req-cce60ede-7212-4a64-a3f5-72aa1be1edc1 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 771.401525] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-cce60ede-7212-4a64-a3f5-72aa1be1edc1 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 771.402438] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a50d097-c894-4bb4-8fa3-36956f37e1c2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.409825] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-cce60ede-7212-4a64-a3f5-72aa1be1edc1 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 771.410090] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-15aa5fe0-0f38-4ce7-add8-6181f8419d72 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.413677] env[69328]: DEBUG nova.compute.manager [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 771.422028] env[69328]: DEBUG oslo_vmware.api [None req-cce60ede-7212-4a64-a3f5-72aa1be1edc1 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Waiting for the task: (returnval){ [ 771.422028] env[69328]: value = "task-3273163" [ 771.422028] env[69328]: _type = "Task" [ 771.422028] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.438162] env[69328]: DEBUG oslo_vmware.api [None req-cce60ede-7212-4a64-a3f5-72aa1be1edc1 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273163, 'name': PowerOffVM_Task} progress is 0%. 
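The PowerOffVM_Task, CopyVirtualDisk_Task and SearchDatastore_Task entries are all driven by the same wait_for_task loop in oslo.vmware, which keeps polling the task and logging "progress is N%" until it reports success or error. A self-contained sketch of that polling loop; FakeTask is a hypothetical stand-in, not a real vSphere task handle:

    import time

    class FakeTask:
        """Hypothetical stand-in for a vSphere task handle."""

        def __init__(self, steps=3):
            self._progress = 0
            self._step = 100 // steps + 1

        def poll(self):
            # Each poll advances the fake task, mirroring the
            # "progress is N%" lines in the log.
            self._progress = min(100, self._progress + self._step)
            state = "success" if self._progress >= 100 else "running"
            return state, self._progress

    def wait_for_task(task, poll_interval=0.1):
        """Poll a task until it finishes, roughly like oslo.vmware's loop."""
        while True:
            state, progress = task.poll()
            print(f"progress is {progress}%")
            if state == "success":
                return
            if state == "error":
                raise RuntimeError("task failed")
            time.sleep(poll_interval)

    wait_for_task(FakeTask())
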
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.458927] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Acquiring lock "refresh_cache-18022645-9a2a-489e-b0b1-486165f46f14" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 771.458927] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Acquired lock "refresh_cache-18022645-9a2a-489e-b0b1-486165f46f14" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 771.459130] env[69328]: DEBUG nova.network.neutron [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 771.582425] env[69328]: DEBUG nova.compute.manager [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 771.583028] env[69328]: DEBUG nova.virt.hardware [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 771.583299] env[69328]: DEBUG nova.virt.hardware [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 771.583546] env[69328]: DEBUG nova.virt.hardware [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 771.584404] env[69328]: DEBUG nova.virt.hardware [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 771.584404] env[69328]: DEBUG nova.virt.hardware [None 
req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 771.584404] env[69328]: DEBUG nova.virt.hardware [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 771.584404] env[69328]: DEBUG nova.virt.hardware [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 771.584964] env[69328]: DEBUG nova.virt.hardware [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 771.585181] env[69328]: DEBUG nova.virt.hardware [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 771.585399] env[69328]: DEBUG nova.virt.hardware [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 771.585723] env[69328]: DEBUG nova.virt.hardware [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 771.586550] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf575783-b53c-40c9-8edd-14ea57392e21 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.602793] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa94d548-e37b-4e20-8312-9d33dcbfb2e6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.645918] env[69328]: DEBUG oslo_vmware.api [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3273162, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067479} completed successfully. 
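The nova.virt.hardware lines above walk the CPU-topology search for the m1.nano flavor: 1 vCPU with no flavor or image limits collapses to the single topology of 1 socket, 1 core, 1 thread. A simplified sketch of that enumeration; the real code additionally honours flavor/image preferences and NUMA constraints:

    from collections import namedtuple

    VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        """Enumerate sockets*cores*threads factorizations of vcpus within limits."""
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    topologies.append(VirtCPUTopology(sockets, cores, threads))
        return topologies

    # 1 vCPU against the 65536/65536/65536 limits seen in the log yields the
    # single topology 1:1:1, matching "Got 1 possible topologies".
    print(possible_topologies(1))
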
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.646373] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 771.647211] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f523bbb-5277-46cd-beca-f9f49f49ab04 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.677021] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Reconfiguring VM instance instance-0000002a to attach disk [datastore2] 99e31dfd-5d41-4564-886f-becc25ca289c/99e31dfd-5d41-4564-886f-becc25ca289c.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 771.680417] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cf49ed18-b78a-4505-aae8-fb36501108ea {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.704162] env[69328]: DEBUG oslo_vmware.api [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]521e6c27-884a-ad1b-e2be-8e59375b0e71, 'name': SearchDatastore_Task, 'duration_secs': 0.008466} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.706318] env[69328]: DEBUG oslo_concurrency.lockutils [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 771.706318] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] d724a141-35e7-4483-99aa-8a17066fb63b/d724a141-35e7-4483-99aa-8a17066fb63b.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 771.706870] env[69328]: DEBUG oslo_vmware.api [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for the task: (returnval){ [ 771.706870] env[69328]: value = "task-3273164" [ 771.706870] env[69328]: _type = "Task" [ 771.706870] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.706870] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c80d1a05-718f-4e50-a45d-ace811e55f88 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.479724] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Acquiring lock "4a990411-16cd-4e53-9068-29654b69abe6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 772.480060] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Lock "4a990411-16cd-4e53-9068-29654b69abe6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 772.480389] env[69328]: DEBUG oslo_vmware.api [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Waiting for the task: (returnval){ [ 772.480389] env[69328]: value = "task-3273165" [ 772.480389] env[69328]: _type = "Task" [ 772.480389] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.488103] env[69328]: DEBUG oslo_vmware.api [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3273164, 'name': ReconfigVM_Task, 'duration_secs': 0.308056} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.492717] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Reconfigured VM instance instance-0000002a to attach disk [datastore2] 99e31dfd-5d41-4564-886f-becc25ca289c/99e31dfd-5d41-4564-886f-becc25ca289c.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 772.492717] env[69328]: DEBUG oslo_vmware.api [None req-cce60ede-7212-4a64-a3f5-72aa1be1edc1 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273163, 'name': PowerOffVM_Task, 'duration_secs': 0.216965} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.493317] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4fbebdb8-3888-4abd-b8ee-b1fc215f940d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.495267] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-cce60ede-7212-4a64-a3f5-72aa1be1edc1 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 772.495458] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-cce60ede-7212-4a64-a3f5-72aa1be1edc1 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 772.501755] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1f52055f-b2b2-48c6-b5c5-dd5e069fbc74 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.503354] env[69328]: DEBUG oslo_vmware.api [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Task: {'id': task-3273165, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.509455] env[69328]: DEBUG oslo_vmware.api [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for the task: (returnval){ [ 772.509455] env[69328]: value = "task-3273166" [ 772.509455] env[69328]: _type = "Task" [ 772.509455] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.520359] env[69328]: DEBUG oslo_vmware.api [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3273166, 'name': Rename_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.543359] env[69328]: DEBUG nova.compute.manager [req-7dc2d9a4-13a1-40bd-831f-747e01db5fe0 req-d177d2ed-2853-4074-9b79-789b6abddda3 service nova] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Received event network-vif-plugged-fbe60697-372d-45c9-97c0-49ce01cbc064 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 772.543988] env[69328]: DEBUG oslo_concurrency.lockutils [req-7dc2d9a4-13a1-40bd-831f-747e01db5fe0 req-d177d2ed-2853-4074-9b79-789b6abddda3 service nova] Acquiring lock "18022645-9a2a-489e-b0b1-486165f46f14-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 772.544319] env[69328]: DEBUG oslo_concurrency.lockutils [req-7dc2d9a4-13a1-40bd-831f-747e01db5fe0 req-d177d2ed-2853-4074-9b79-789b6abddda3 service nova] Lock "18022645-9a2a-489e-b0b1-486165f46f14-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 772.544477] env[69328]: DEBUG oslo_concurrency.lockutils [req-7dc2d9a4-13a1-40bd-831f-747e01db5fe0 req-d177d2ed-2853-4074-9b79-789b6abddda3 service nova] Lock "18022645-9a2a-489e-b0b1-486165f46f14-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 772.544687] env[69328]: DEBUG nova.compute.manager [req-7dc2d9a4-13a1-40bd-831f-747e01db5fe0 req-d177d2ed-2853-4074-9b79-789b6abddda3 service nova] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] No waiting events found dispatching network-vif-plugged-fbe60697-372d-45c9-97c0-49ce01cbc064 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 772.544886] env[69328]: WARNING nova.compute.manager [req-7dc2d9a4-13a1-40bd-831f-747e01db5fe0 req-d177d2ed-2853-4074-9b79-789b6abddda3 service nova] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Received unexpected event network-vif-plugged-fbe60697-372d-45c9-97c0-49ce01cbc064 for instance with vm_state building and task_state spawning. [ 772.545107] env[69328]: DEBUG nova.compute.manager [req-7dc2d9a4-13a1-40bd-831f-747e01db5fe0 req-d177d2ed-2853-4074-9b79-789b6abddda3 service nova] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Received event network-changed-fbe60697-372d-45c9-97c0-49ce01cbc064 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 772.545316] env[69328]: DEBUG nova.compute.manager [req-7dc2d9a4-13a1-40bd-831f-747e01db5fe0 req-d177d2ed-2853-4074-9b79-789b6abddda3 service nova] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Refreshing instance network info cache due to event network-changed-fbe60697-372d-45c9-97c0-49ce01cbc064. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 772.545516] env[69328]: DEBUG oslo_concurrency.lockutils [req-7dc2d9a4-13a1-40bd-831f-747e01db5fe0 req-d177d2ed-2853-4074-9b79-789b6abddda3 service nova] Acquiring lock "refresh_cache-18022645-9a2a-489e-b0b1-486165f46f14" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.576127] env[69328]: DEBUG nova.network.neutron [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 772.583754] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-cce60ede-7212-4a64-a3f5-72aa1be1edc1 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 772.583754] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-cce60ede-7212-4a64-a3f5-72aa1be1edc1 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 772.583879] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-cce60ede-7212-4a64-a3f5-72aa1be1edc1 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Deleting the datastore file [datastore2] 5292b759-9d1f-486a-b4d6-90519b3ae986 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 772.588023] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0bfa32ae-b810-4c80-8afd-3b99c99ea810 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.594034] env[69328]: DEBUG oslo_vmware.api [None req-cce60ede-7212-4a64-a3f5-72aa1be1edc1 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Waiting for the task: (returnval){ [ 772.594034] env[69328]: value = "task-3273168" [ 772.594034] env[69328]: _type = "Task" [ 772.594034] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.603386] env[69328]: DEBUG oslo_vmware.api [None req-cce60ede-7212-4a64-a3f5-72aa1be1edc1 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273168, 'name': DeleteDatastoreFile_Task} progress is 0%. 
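The network-vif-plugged and network-changed entries above show the external-event handshake: Neutron notifies Nova, and the compute manager either pops an event a waiter registered for or logs it as unexpected ("No waiting events found", "Received unexpected event"). A rough thread-based sketch of that bookkeeping; the class name echoes the log, but the implementation is illustrative, not Nova's actual API:

    import threading
    from collections import defaultdict

    class InstanceEvents:
        """Per-instance pending-event table, sketched after the log above."""

        def __init__(self):
            self._lock = threading.Lock()
            # instance_uuid -> {event_name: threading.Event}
            self._waiters = defaultdict(dict)

        def prepare_for(self, instance_uuid, event_name):
            ev = threading.Event()
            with self._lock:
                self._waiters[instance_uuid][event_name] = ev
            return ev

        def pop_event(self, instance_uuid, event_name):
            with self._lock:
                ev = self._waiters[instance_uuid].pop(event_name, None)
            if ev is None:
                print(f"Received unexpected event {event_name} "
                      f"for instance {instance_uuid}")
                return False
            ev.set()
            return True

    events = InstanceEvents()
    waiter = events.prepare_for("18022645", "network-vif-plugged-fbe60697")
    events.pop_event("18022645", "network-vif-plugged-fbe60697")
    assert waiter.is_set()
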
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.829306] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0478ed28-7dc6-4b0f-bd26-f0a4513f1e08 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.839205] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70ffa4ed-9f39-4818-bf2c-ecaec7418cde {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.878799] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7428a58c-cf84-4839-a771-e0426e43cfa9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.888741] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58a2fd17-db49-4a69-87d2-f5d19e415712 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.903875] env[69328]: DEBUG nova.compute.provider_tree [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 772.971609] env[69328]: DEBUG nova.network.neutron [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Updating instance_info_cache with network_info: [{"id": "fbe60697-372d-45c9-97c0-49ce01cbc064", "address": "fa:16:3e:e7:a0:1b", "network": {"id": "f29114eb-6d33-4dd9-8c6c-f817e73e3761", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1738105219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0ef63e916e324066a8feacfe8a4b6358", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7654928b-7afe-42e3-a18d-68ecc775cefe", "external-id": "cl2-zone-807", "segmentation_id": 807, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbe60697-37", "ovs_interfaceid": "fbe60697-372d-45c9-97c0-49ce01cbc064", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 772.978247] env[69328]: DEBUG nova.compute.manager [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 772.995413] env[69328]: DEBUG oslo_vmware.api [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Task: {'id': task-3273165, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.011809] env[69328]: DEBUG nova.virt.hardware [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 773.012128] env[69328]: DEBUG nova.virt.hardware [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 773.012298] env[69328]: DEBUG nova.virt.hardware [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 773.012483] env[69328]: DEBUG nova.virt.hardware [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 773.012629] env[69328]: DEBUG nova.virt.hardware [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 773.012807] env[69328]: DEBUG nova.virt.hardware [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 773.012976] env[69328]: DEBUG nova.virt.hardware [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 773.013410] env[69328]: 
DEBUG nova.virt.hardware [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 773.013803] env[69328]: DEBUG nova.virt.hardware [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 773.013909] env[69328]: DEBUG nova.virt.hardware [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 773.014138] env[69328]: DEBUG nova.virt.hardware [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 773.015764] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c28d957-90ac-4a9e-9974-b04b5830b4fd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.030605] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6659c4f8-4d14-45ee-9286-6d9b1d8d89f3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.035374] env[69328]: DEBUG oslo_vmware.api [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3273166, 'name': Rename_Task, 'duration_secs': 0.164166} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.036365] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 773.037131] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b21f1b00-81af-40cd-bef8-56450d218695 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.047684] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Instance VIF info [] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 773.056211] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Creating folder: Project (6fd26d270ef24cb08ea5e73ef25d4fe4). Parent ref: group-v653649. 
{{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 773.056211] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bd5d1535-7387-4975-8ec5-1c68e175f795 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.059906] env[69328]: DEBUG oslo_vmware.api [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for the task: (returnval){ [ 773.059906] env[69328]: value = "task-3273169" [ 773.059906] env[69328]: _type = "Task" [ 773.059906] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.065061] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Created folder: Project (6fd26d270ef24cb08ea5e73ef25d4fe4) in parent group-v653649. [ 773.065061] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Creating folder: Instances. Parent ref: group-v653782. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 773.065594] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6a6b7317-5176-46b0-a28f-9b26c40bdb5f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.070916] env[69328]: DEBUG oslo_vmware.api [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3273169, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.078232] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Created folder: Instances in parent group-v653782. [ 773.078411] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 773.078617] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 773.078824] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bc876618-27ad-49dd-8f5b-f29d53a0b364 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.098703] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 773.098703] env[69328]: value = "task-3273172" [ 773.098703] env[69328]: _type = "Task" [ 773.098703] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.106158] env[69328]: DEBUG oslo_vmware.api [None req-cce60ede-7212-4a64-a3f5-72aa1be1edc1 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273168, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.340009} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.106869] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-cce60ede-7212-4a64-a3f5-72aa1be1edc1 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 773.107168] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-cce60ede-7212-4a64-a3f5-72aa1be1edc1 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 773.107419] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-cce60ede-7212-4a64-a3f5-72aa1be1edc1 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 773.107653] env[69328]: INFO nova.compute.manager [None req-cce60ede-7212-4a64-a3f5-72aa1be1edc1 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Took 1.71 seconds to destroy the instance on the hypervisor. [ 773.107969] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cce60ede-7212-4a64-a3f5-72aa1be1edc1 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 773.111297] env[69328]: DEBUG nova.compute.manager [-] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 773.111464] env[69328]: DEBUG nova.network.neutron [-] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 773.113121] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273172, 'name': CreateVM_Task} progress is 6%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.406692] env[69328]: DEBUG nova.scheduler.client.report [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 773.474676] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Releasing lock "refresh_cache-18022645-9a2a-489e-b0b1-486165f46f14" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 773.475072] env[69328]: DEBUG nova.compute.manager [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Instance network_info: |[{"id": "fbe60697-372d-45c9-97c0-49ce01cbc064", "address": "fa:16:3e:e7:a0:1b", "network": {"id": "f29114eb-6d33-4dd9-8c6c-f817e73e3761", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1738105219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0ef63e916e324066a8feacfe8a4b6358", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7654928b-7afe-42e3-a18d-68ecc775cefe", "external-id": "cl2-zone-807", "segmentation_id": 807, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbe60697-37", "ovs_interfaceid": "fbe60697-372d-45c9-97c0-49ce01cbc064", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 773.476222] env[69328]: DEBUG oslo_concurrency.lockutils [req-7dc2d9a4-13a1-40bd-831f-747e01db5fe0 req-d177d2ed-2853-4074-9b79-789b6abddda3 service nova] Acquired lock "refresh_cache-18022645-9a2a-489e-b0b1-486165f46f14" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 773.476222] env[69328]: DEBUG nova.network.neutron [req-7dc2d9a4-13a1-40bd-831f-747e01db5fe0 req-d177d2ed-2853-4074-9b79-789b6abddda3 service nova] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Refreshing network info cache for port fbe60697-372d-45c9-97c0-49ce01cbc064 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 773.476862] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None 
req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e7:a0:1b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7654928b-7afe-42e3-a18d-68ecc775cefe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fbe60697-372d-45c9-97c0-49ce01cbc064', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 773.485413] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Creating folder: Project (0ef63e916e324066a8feacfe8a4b6358). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 773.486199] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6c972191-0594-497a-99eb-217777222de9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.499456] env[69328]: DEBUG oslo_vmware.api [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Task: {'id': task-3273165, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.630678} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.499841] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] d724a141-35e7-4483-99aa-8a17066fb63b/d724a141-35e7-4483-99aa-8a17066fb63b.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 773.500139] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 773.500449] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-934edb3b-2590-4430-a19a-78c4f52d8a69 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.508320] env[69328]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 773.508616] env[69328]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=69328) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 773.509034] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Folder already exists: Project (0ef63e916e324066a8feacfe8a4b6358). Parent ref: group-v653649. 
{{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 773.509323] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Creating folder: Instances. Parent ref: group-v653716. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 773.510839] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-14ecd2bc-eb37-4b1c-beeb-52de134044fa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.512699] env[69328]: DEBUG oslo_vmware.api [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Waiting for the task: (returnval){ [ 773.512699] env[69328]: value = "task-3273174" [ 773.512699] env[69328]: _type = "Task" [ 773.512699] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.523285] env[69328]: DEBUG oslo_vmware.api [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Task: {'id': task-3273174, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.524647] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Created folder: Instances in parent group-v653716. [ 773.524961] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 773.525239] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 773.525511] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6c382921-2532-42ac-be73-7cf83ae0d3bb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.544515] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 773.544515] env[69328]: value = "task-3273176" [ 773.544515] env[69328]: _type = "Task" [ 773.544515] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.555196] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273176, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.569640] env[69328]: DEBUG oslo_vmware.api [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3273169, 'name': PowerOnVM_Task, 'duration_secs': 0.463947} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.569990] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 773.570606] env[69328]: INFO nova.compute.manager [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Took 9.23 seconds to spawn the instance on the hypervisor. [ 773.570906] env[69328]: DEBUG nova.compute.manager [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 773.571867] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c984e5d1-f068-40ee-b236-b7f92d906bbd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.608820] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273172, 'name': CreateVM_Task, 'duration_secs': 0.316226} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.608997] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 773.609613] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.609857] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 773.610290] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 773.610643] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b08f6f8c-913b-4467-a9ae-0af59d356cb7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.616163] env[69328]: DEBUG oslo_vmware.api [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Waiting for the task: (returnval){ 
[ 773.616163] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]528ff81d-eef5-acd0-57c8-0e01e471f666" [ 773.616163] env[69328]: _type = "Task" [ 773.616163] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.625480] env[69328]: DEBUG oslo_vmware.api [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]528ff81d-eef5-acd0-57c8-0e01e471f666, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.912345] env[69328]: DEBUG oslo_concurrency.lockutils [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.507s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 773.912888] env[69328]: DEBUG nova.compute.manager [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 773.917154] env[69328]: DEBUG oslo_concurrency.lockutils [None req-51927333-290d-4571-bbf0-9e7e32968770 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.892s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 773.917154] env[69328]: DEBUG oslo_concurrency.lockutils [None req-51927333-290d-4571-bbf0-9e7e32968770 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 773.919083] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.758s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 773.920630] env[69328]: INFO nova.compute.claims [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 773.946118] env[69328]: INFO nova.scheduler.client.report [None req-51927333-290d-4571-bbf0-9e7e32968770 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Deleted allocations for instance bbbfb48d-b474-4a6e-9078-336f23d2c343 [ 774.024234] env[69328]: DEBUG oslo_vmware.api [None req-18849c5c-7c4e-4186-8068-b389720e93a7 
tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Task: {'id': task-3273174, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06988} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.024559] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 774.026133] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cfdabc6-2960-44ca-a7dd-6de7fc04b029 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.055342] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Reconfiguring VM instance instance-0000002b to attach disk [datastore1] d724a141-35e7-4483-99aa-8a17066fb63b/d724a141-35e7-4483-99aa-8a17066fb63b.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 774.056318] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f01a976d-4d7c-4787-b048-8e84b0344af3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.078601] env[69328]: DEBUG nova.network.neutron [-] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 774.096317] env[69328]: INFO nova.compute.manager [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Took 51.14 seconds to build instance. [ 774.099439] env[69328]: DEBUG oslo_vmware.api [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Waiting for the task: (returnval){ [ 774.099439] env[69328]: value = "task-3273177" [ 774.099439] env[69328]: _type = "Task" [ 774.099439] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.104816] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273176, 'name': CreateVM_Task} progress is 25%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.116438] env[69328]: DEBUG oslo_vmware.api [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Task: {'id': task-3273177, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.126850] env[69328]: DEBUG oslo_vmware.api [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]528ff81d-eef5-acd0-57c8-0e01e471f666, 'name': SearchDatastore_Task, 'duration_secs': 0.010657} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.127368] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 774.130017] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 774.130017] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.130017] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 774.130017] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 774.130017] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-30ce0c31-dea5-424e-8de5-99b3d94a5dd6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.138015] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 774.138393] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 774.139552] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8205ccf-2769-469f-9e6c-79072f85c849 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.147017] env[69328]: DEBUG oslo_vmware.api [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Waiting for the task: (returnval){ [ 774.147017] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]526fe6e4-092a-6e51-0a29-1d9461c0a9cc" [ 774.147017] env[69328]: _type = "Task" [ 774.147017] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.153091] env[69328]: DEBUG oslo_vmware.api [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]526fe6e4-092a-6e51-0a29-1d9461c0a9cc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.428707] env[69328]: DEBUG nova.compute.utils [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 774.433024] env[69328]: DEBUG nova.compute.manager [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 774.433024] env[69328]: DEBUG nova.network.neutron [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 774.440131] env[69328]: DEBUG nova.network.neutron [req-7dc2d9a4-13a1-40bd-831f-747e01db5fe0 req-d177d2ed-2853-4074-9b79-789b6abddda3 service nova] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Updated VIF entry in instance network info cache for port fbe60697-372d-45c9-97c0-49ce01cbc064. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 774.440131] env[69328]: DEBUG nova.network.neutron [req-7dc2d9a4-13a1-40bd-831f-747e01db5fe0 req-d177d2ed-2853-4074-9b79-789b6abddda3 service nova] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Updating instance_info_cache with network_info: [{"id": "fbe60697-372d-45c9-97c0-49ce01cbc064", "address": "fa:16:3e:e7:a0:1b", "network": {"id": "f29114eb-6d33-4dd9-8c6c-f817e73e3761", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1738105219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0ef63e916e324066a8feacfe8a4b6358", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7654928b-7afe-42e3-a18d-68ecc775cefe", "external-id": "cl2-zone-807", "segmentation_id": 807, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbe60697-37", "ovs_interfaceid": "fbe60697-372d-45c9-97c0-49ce01cbc064", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 774.455250] env[69328]: DEBUG oslo_concurrency.lockutils [None req-51927333-290d-4571-bbf0-9e7e32968770 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Lock "bbbfb48d-b474-4a6e-9078-336f23d2c343" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.019s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 774.509170] env[69328]: DEBUG nova.policy [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '001cde53877948ba91f0c4f26c438d65', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '79faf009f74c4bb59df1cc4c6b0dadd4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 774.568023] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273176, 'name': CreateVM_Task, 'duration_secs': 0.766977} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.568192] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 774.568832] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'mount_device': '/dev/sda', 'boot_index': 0, 'delete_on_termination': True, 'disk_bus': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653727', 'volume_id': 'aa5050fe-f367-4822-9aa7-4bfac9106402', 'name': 'volume-aa5050fe-f367-4822-9aa7-4bfac9106402', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '18022645-9a2a-489e-b0b1-486165f46f14', 'attached_at': '', 'detached_at': '', 'volume_id': 'aa5050fe-f367-4822-9aa7-4bfac9106402', 'serial': 'aa5050fe-f367-4822-9aa7-4bfac9106402'}, 'guest_format': None, 'device_type': None, 'attachment_id': '1161ad5a-2471-45ff-8f54-a5fecc883515', 'volume_type': None}], 'swap': None} {{(pid=69328) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 774.569036] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Root volume attach. Driver type: vmdk {{(pid=69328) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 774.569826] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89a31fad-7ac6-45f9-8e47-bb7a03381a90 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.578355] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b91a83e-9af8-4701-891c-fc80bc09b16d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.584655] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea15a4b9-5b9e-44bc-88e9-c2b45944b2af {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.587798] env[69328]: INFO nova.compute.manager [-] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Took 1.48 seconds to deallocate network for instance. 
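The wait_for_task/_poll_task entries that recur throughout this trace (api.py:397, :434 and :444 in the {{...}} suffixes) show the same lifecycle each time: the driver logs "Waiting for the task", polls the Task object while it reports a progress percentage, and finally logs "completed successfully" together with a duration once the task reaches a terminal state. The block below is a minimal, self-contained sketch of that poll-until-terminal pattern in plain Python; VimTaskInfo, fetch_task_info and POLL_INTERVAL are hypothetical stand-ins for illustration only and are not oslo_vmware's actual classes or signatures.

# Sketch of the poll-until-done pattern suggested by the wait_for_task/_poll_task
# entries above. Everything here (VimTaskInfo, fetch_task_info, POLL_INTERVAL) is a
# hypothetical stand-in for illustration; it is NOT the oslo_vmware implementation.
import logging
import time
from dataclasses import dataclass

LOG = logging.getLogger(__name__)
POLL_INTERVAL = 0.5  # assumed polling period, in seconds


@dataclass
class VimTaskInfo:
    task_id: str
    name: str
    state: str          # 'queued' | 'running' | 'success' | 'error'
    progress: int = 0   # percent complete
    error: str | None = None


def wait_for_task(task_id: str, fetch_task_info) -> VimTaskInfo:
    """Poll a task until it reaches a terminal state, logging progress.

    ``fetch_task_info`` is any callable that returns the current
    ``VimTaskInfo`` for ``task_id`` (e.g. a vSphere property read).
    """
    LOG.debug("Waiting for the task: %s to complete.", task_id)
    started = time.monotonic()
    while True:
        info = fetch_task_info(task_id)
        if info.state in ("queued", "running"):
            # Mirrors the "Task: {...} progress is N%." lines in the log above.
            LOG.debug("Task: {'id': %s, 'name': %s} progress is %d%%.",
                      info.task_id, info.name, info.progress)
            time.sleep(POLL_INTERVAL)
            continue
        if info.state == "success":
            duration = time.monotonic() - started
            LOG.debug("Task: {'id': %s, 'name': %s, 'duration_secs': %.6f} "
                      "completed successfully.", info.task_id, info.name, duration)
            return info
        # Terminal failure: surface the fault instead of polling forever.
        raise RuntimeError(f"Task {info.task_id} ({info.name}) failed: {info.error}")

# Example (hypothetical caller): wait_for_task("task-3273178", vsphere_client.get_task_info)

In the trace itself the equivalent loop runs inside oslo_vmware (wait_for_task at api.py:397, _poll_task at api.py:434/444), which is why every task type seen here -- CreateVM_Task, CopyVirtualDisk_Task, RelocateVM_Task, DeleteDatastoreFile_Task, PowerOnVM_Task -- produces the same three-phase message sequence.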
[ 774.596018] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-c6e00c62-632f-4e6b-9b2d-047ad89b2038 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.600938] env[69328]: DEBUG oslo_concurrency.lockutils [None req-94a47f45-98b3-4ee7-960f-6f96e0d63a8f tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Lock "99e31dfd-5d41-4564-886f-becc25ca289c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 98.280s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 774.604114] env[69328]: DEBUG oslo_vmware.api [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Waiting for the task: (returnval){ [ 774.604114] env[69328]: value = "task-3273178" [ 774.604114] env[69328]: _type = "Task" [ 774.604114] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.616990] env[69328]: DEBUG oslo_vmware.api [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Task: {'id': task-3273178, 'name': RelocateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.620374] env[69328]: DEBUG oslo_vmware.api [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Task: {'id': task-3273177, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.658381] env[69328]: DEBUG oslo_vmware.api [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]526fe6e4-092a-6e51-0a29-1d9461c0a9cc, 'name': SearchDatastore_Task, 'duration_secs': 0.03162} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.659156] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-765eba74-c3b2-482b-9671-a7e01ba3b30f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.665713] env[69328]: DEBUG oslo_vmware.api [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Waiting for the task: (returnval){ [ 774.665713] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e67f84-84c5-a688-8705-c098481b4492" [ 774.665713] env[69328]: _type = "Task" [ 774.665713] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.673884] env[69328]: DEBUG oslo_vmware.api [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e67f84-84c5-a688-8705-c098481b4492, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.855410] env[69328]: DEBUG nova.compute.manager [req-e0b799b9-a20e-4b6c-9b9c-4b6c8909ccb6 req-670f0595-a54c-47d4-ab53-d8cd56b31140 service nova] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Received event network-vif-deleted-b53cf739-4e56-4bd3-85f1-7e758b465dd8 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 774.927560] env[69328]: DEBUG oslo_concurrency.lockutils [None req-44f3ab11-d601-4dcc-8461-7aed79509981 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Acquiring lock "5b0e8bef-dcfc-4c5e-89d2-aa1748050d29" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 774.927830] env[69328]: DEBUG oslo_concurrency.lockutils [None req-44f3ab11-d601-4dcc-8461-7aed79509981 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Lock "5b0e8bef-dcfc-4c5e-89d2-aa1748050d29" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 774.929029] env[69328]: DEBUG oslo_concurrency.lockutils [None req-44f3ab11-d601-4dcc-8461-7aed79509981 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Acquiring lock "5b0e8bef-dcfc-4c5e-89d2-aa1748050d29-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 774.929029] env[69328]: DEBUG oslo_concurrency.lockutils [None req-44f3ab11-d601-4dcc-8461-7aed79509981 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Lock "5b0e8bef-dcfc-4c5e-89d2-aa1748050d29-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 774.929029] env[69328]: DEBUG oslo_concurrency.lockutils [None req-44f3ab11-d601-4dcc-8461-7aed79509981 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Lock "5b0e8bef-dcfc-4c5e-89d2-aa1748050d29-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 774.930824] env[69328]: INFO nova.compute.manager [None req-44f3ab11-d601-4dcc-8461-7aed79509981 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Terminating instance [ 774.940215] env[69328]: DEBUG nova.compute.manager [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Start building block device mappings for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 774.949177] env[69328]: DEBUG oslo_concurrency.lockutils [req-7dc2d9a4-13a1-40bd-831f-747e01db5fe0 req-d177d2ed-2853-4074-9b79-789b6abddda3 service nova] Releasing lock "refresh_cache-18022645-9a2a-489e-b0b1-486165f46f14" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 775.026937] env[69328]: DEBUG nova.network.neutron [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Successfully created port: 33b9c9a0-82a8-4195-bb5a-d8e6e911ddc5 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 775.096834] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cce60ede-7212-4a64-a3f5-72aa1be1edc1 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 775.113673] env[69328]: DEBUG nova.compute.manager [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 775.140090] env[69328]: DEBUG oslo_vmware.api [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Task: {'id': task-3273178, 'name': RelocateVM_Task} progress is 34%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.145708] env[69328]: DEBUG oslo_vmware.api [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Task: {'id': task-3273177, 'name': ReconfigVM_Task, 'duration_secs': 0.586664} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.150306] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Reconfigured VM instance instance-0000002b to attach disk [datastore1] d724a141-35e7-4483-99aa-8a17066fb63b/d724a141-35e7-4483-99aa-8a17066fb63b.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 775.151631] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2adb44d2-6b82-4e41-96f4-4d669b6a8a46 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.160970] env[69328]: DEBUG oslo_vmware.api [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Waiting for the task: (returnval){ [ 775.160970] env[69328]: value = "task-3273179" [ 775.160970] env[69328]: _type = "Task" [ 775.160970] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.178546] env[69328]: DEBUG oslo_vmware.api [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Task: {'id': task-3273179, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.190500] env[69328]: DEBUG oslo_vmware.api [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e67f84-84c5-a688-8705-c098481b4492, 'name': SearchDatastore_Task, 'duration_secs': 0.009391} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.190824] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 775.191136] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe/f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 775.192363] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5e2a4fc2-c485-47f1-9f3e-a34551f253aa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.203084] env[69328]: DEBUG oslo_vmware.api [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Waiting for the task: (returnval){ [ 775.203084] env[69328]: value = "task-3273180" [ 775.203084] env[69328]: _type = "Task" [ 775.203084] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.213142] env[69328]: DEBUG oslo_vmware.api [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Task: {'id': task-3273180, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.456026] env[69328]: DEBUG nova.compute.manager [None req-44f3ab11-d601-4dcc-8461-7aed79509981 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 775.456026] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-44f3ab11-d601-4dcc-8461-7aed79509981 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 775.458350] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39d9a127-e679-4955-865c-32a93f963cec {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.470845] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-44f3ab11-d601-4dcc-8461-7aed79509981 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 775.471685] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d0c08640-1918-4a4e-b1de-6dc362b98261 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.484925] env[69328]: DEBUG oslo_vmware.api [None req-44f3ab11-d601-4dcc-8461-7aed79509981 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Waiting for the task: (returnval){ [ 775.484925] env[69328]: value = "task-3273181" [ 775.484925] env[69328]: _type = "Task" [ 775.484925] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.500733] env[69328]: DEBUG oslo_vmware.api [None req-44f3ab11-d601-4dcc-8461-7aed79509981 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3273181, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.582566] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a8feba1-9b50-4e5f-b685-69a948e9e2bf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.594895] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a379fe1c-af90-4e12-856e-211376069d44 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.641796] env[69328]: INFO nova.compute.manager [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Rescuing [ 775.642105] env[69328]: DEBUG oslo_concurrency.lockutils [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Acquiring lock "refresh_cache-99e31dfd-5d41-4564-886f-becc25ca289c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 775.642228] env[69328]: DEBUG oslo_concurrency.lockutils [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Acquired lock "refresh_cache-99e31dfd-5d41-4564-886f-becc25ca289c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 775.642329] env[69328]: DEBUG nova.network.neutron [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 775.655488] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0493abf-c11e-48bf-b674-d25136c54d05 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.670522] env[69328]: DEBUG oslo_vmware.api [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Task: {'id': task-3273178, 'name': RelocateVM_Task} progress is 49%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.674500] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b6b1a57-d6d7-4087-b213-22eb41bcb704 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.683907] env[69328]: DEBUG oslo_vmware.api [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Task: {'id': task-3273179, 'name': Rename_Task, 'duration_secs': 0.247713} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.685162] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 775.685162] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9c565160-90fe-4b65-b14b-53ade4bde5e3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.698881] env[69328]: DEBUG nova.compute.provider_tree [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 775.701853] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 775.703201] env[69328]: DEBUG oslo_vmware.api [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Waiting for the task: (returnval){ [ 775.703201] env[69328]: value = "task-3273182" [ 775.703201] env[69328]: _type = "Task" [ 775.703201] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.717614] env[69328]: DEBUG oslo_vmware.api [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Task: {'id': task-3273182, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.721407] env[69328]: DEBUG oslo_vmware.api [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Task: {'id': task-3273180, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.959290] env[69328]: DEBUG nova.compute.manager [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 776.005128] env[69328]: DEBUG oslo_vmware.api [None req-44f3ab11-d601-4dcc-8461-7aed79509981 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3273181, 'name': PowerOffVM_Task, 'duration_secs': 0.484085} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.005128] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-44f3ab11-d601-4dcc-8461-7aed79509981 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 776.005128] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-44f3ab11-d601-4dcc-8461-7aed79509981 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 776.005128] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-42640a63-f69c-40c0-a6f9-b5b15d3a1bc2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.014748] env[69328]: DEBUG nova.virt.hardware [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 776.015109] env[69328]: DEBUG nova.virt.hardware [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 776.015294] env[69328]: DEBUG nova.virt.hardware [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 776.015483] env[69328]: DEBUG nova.virt.hardware [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 776.015627] env[69328]: DEBUG nova.virt.hardware [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 776.015770] env[69328]: DEBUG nova.virt.hardware [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 
tempest-ServersTestFqdnHostnames-1173412037-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 776.016244] env[69328]: DEBUG nova.virt.hardware [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 776.016471] env[69328]: DEBUG nova.virt.hardware [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 776.016800] env[69328]: DEBUG nova.virt.hardware [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 776.017105] env[69328]: DEBUG nova.virt.hardware [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 776.017676] env[69328]: DEBUG nova.virt.hardware [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 776.019792] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8dac1eb-66a1-4b5a-9bce-539dbd847154 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.029359] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac62fe0e-b13d-4380-babf-e31e2551c43b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.082522] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-44f3ab11-d601-4dcc-8461-7aed79509981 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 776.082762] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-44f3ab11-d601-4dcc-8461-7aed79509981 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 776.082922] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-44f3ab11-d601-4dcc-8461-7aed79509981 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Deleting the datastore 
file [datastore2] 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 776.083245] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7d8bb03d-8499-4ccf-b659-b3ba85fed3d4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.090750] env[69328]: DEBUG oslo_vmware.api [None req-44f3ab11-d601-4dcc-8461-7aed79509981 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Waiting for the task: (returnval){ [ 776.090750] env[69328]: value = "task-3273184" [ 776.090750] env[69328]: _type = "Task" [ 776.090750] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.103717] env[69328]: DEBUG oslo_vmware.api [None req-44f3ab11-d601-4dcc-8461-7aed79509981 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3273184, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.159287] env[69328]: DEBUG oslo_vmware.api [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Task: {'id': task-3273178, 'name': RelocateVM_Task} progress is 62%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.207136] env[69328]: DEBUG nova.scheduler.client.report [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 776.226397] env[69328]: DEBUG oslo_vmware.api [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Task: {'id': task-3273180, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.230434] env[69328]: DEBUG oslo_vmware.api [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Task: {'id': task-3273182, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.591048] env[69328]: DEBUG nova.network.neutron [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Updating instance_info_cache with network_info: [{"id": "90f7115d-cbd5-42dd-a07a-5eb45deb5276", "address": "fa:16:3e:22:47:ec", "network": {"id": "032910e6-4d2e-415b-ac3e-ed7a7fadf536", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1432969230-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "690511a8725a4dd6ab796a15569293a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "be5c038c-29e5-43c9-91ab-9eb3094b5337", "external-id": "nsx-vlan-transportzone-511", "segmentation_id": 511, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90f7115d-cb", "ovs_interfaceid": "90f7115d-cbd5-42dd-a07a-5eb45deb5276", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 776.604964] env[69328]: DEBUG oslo_vmware.api [None req-44f3ab11-d601-4dcc-8461-7aed79509981 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3273184, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.653424] env[69328]: DEBUG oslo_vmware.api [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Task: {'id': task-3273178, 'name': RelocateVM_Task} progress is 75%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.721592] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.802s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 776.722667] env[69328]: DEBUG nova.compute.manager [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 776.733823] env[69328]: DEBUG oslo_concurrency.lockutils [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 30.427s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 776.736199] env[69328]: DEBUG nova.objects.instance [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69328) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 776.739109] env[69328]: DEBUG oslo_vmware.api [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Task: {'id': task-3273180, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.740587] env[69328]: DEBUG oslo_vmware.api [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Task: {'id': task-3273182, 'name': PowerOnVM_Task, 'duration_secs': 0.973154} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.740771] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 776.741080] env[69328]: INFO nova.compute.manager [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Took 9.68 seconds to spawn the instance on the hypervisor. 
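The "Waiting for the task" / "Task: {'id': task-..., ...} progress is N%" entries above, together with the repeated "Invoking PropertyCollector.RetrievePropertiesEx" calls, all come from the oslo.vmware session layer: the driver invokes a vSphere API method that returns a task reference, then wait_for_task() polls the task's info until it succeeds or fails. The snippet below is a minimal standalone sketch of that pattern, not Nova's own code; the vCenter endpoint, the credentials, and the assumption that at least one VirtualMachine exists are placeholders for illustration only.

from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

# Placeholder endpoint/credentials -- not values taken from this log.
session = vmware_api.VMwareAPISession(
    'vc.example.test',
    'administrator@vsphere.local',
    'secret',
    api_retry_count=10,
    task_poll_interval=0.5,  # poll interval behind the "progress is N%" entries
)

# PropertyCollector query (the RetrievePropertiesEx invocations seen in the log):
# list up to 100 VirtualMachine objects and read their 'name' property.
result = session.invoke_api(vim_util, 'get_objects', session.vim,
                            'VirtualMachine', 100, ['name'])
vm_ref = result.objects[0].obj  # managed-object reference of the first VM found

# Start the VM: PowerOnVM_Task returns a task reference, and wait_for_task()
# blocks while periodically re-reading TaskInfo, which is what produces the
# PowerOnVM_Task progress lines in the log.
task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task_ref)
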
[ 776.741379] env[69328]: DEBUG nova.compute.manager [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 776.742376] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-579afb15-fec1-4830-8627-79080786ff9c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.102537] env[69328]: DEBUG oslo_concurrency.lockutils [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Releasing lock "refresh_cache-99e31dfd-5d41-4564-886f-becc25ca289c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 777.113059] env[69328]: DEBUG oslo_vmware.api [None req-44f3ab11-d601-4dcc-8461-7aed79509981 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Task: {'id': task-3273184, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.718023} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.113501] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-44f3ab11-d601-4dcc-8461-7aed79509981 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 777.113782] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-44f3ab11-d601-4dcc-8461-7aed79509981 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 777.113861] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-44f3ab11-d601-4dcc-8461-7aed79509981 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 777.114018] env[69328]: INFO nova.compute.manager [None req-44f3ab11-d601-4dcc-8461-7aed79509981 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Took 1.66 seconds to destroy the instance on the hypervisor. [ 777.114295] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-44f3ab11-d601-4dcc-8461-7aed79509981 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 777.114497] env[69328]: DEBUG nova.compute.manager [-] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 777.114603] env[69328]: DEBUG nova.network.neutron [-] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 777.162632] env[69328]: DEBUG oslo_vmware.api [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Task: {'id': task-3273178, 'name': RelocateVM_Task} progress is 88%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.226988] env[69328]: DEBUG oslo_vmware.api [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Task: {'id': task-3273180, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.622829} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.226988] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe/f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 777.227262] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 777.227562] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1d6fdacf-c515-4cb6-a1c8-a71656341b9a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.239621] env[69328]: DEBUG oslo_vmware.api [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Waiting for the task: (returnval){ [ 777.239621] env[69328]: value = "task-3273185" [ 777.239621] env[69328]: _type = "Task" [ 777.239621] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.248406] env[69328]: DEBUG nova.compute.utils [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 777.255553] env[69328]: DEBUG nova.compute.manager [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 777.255775] env[69328]: DEBUG nova.network.neutron [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 777.271988] env[69328]: DEBUG oslo_vmware.api [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Task: {'id': task-3273185, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.278472] env[69328]: INFO nova.compute.manager [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Took 50.40 seconds to build instance. [ 777.390087] env[69328]: DEBUG nova.policy [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '91ee0807be574796bec53919ecd5a934', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f2aed2695f2d437fbe9202124d2ed95b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 777.422312] env[69328]: DEBUG nova.network.neutron [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Successfully updated port: 33b9c9a0-82a8-4195-bb5a-d8e6e911ddc5 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 777.655218] env[69328]: DEBUG oslo_vmware.api [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Task: {'id': task-3273178, 'name': RelocateVM_Task} progress is 97%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.738292] env[69328]: DEBUG nova.compute.manager [req-396bdac8-b386-4cf6-9c9e-0b0fd629faf1 req-2c1a1154-3e25-45ff-a8d5-2a9f58427e4f service nova] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Received event network-vif-plugged-33b9c9a0-82a8-4195-bb5a-d8e6e911ddc5 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 777.738292] env[69328]: DEBUG oslo_concurrency.lockutils [req-396bdac8-b386-4cf6-9c9e-0b0fd629faf1 req-2c1a1154-3e25-45ff-a8d5-2a9f58427e4f service nova] Acquiring lock "8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 777.738600] env[69328]: DEBUG oslo_concurrency.lockutils [req-396bdac8-b386-4cf6-9c9e-0b0fd629faf1 req-2c1a1154-3e25-45ff-a8d5-2a9f58427e4f service nova] Lock "8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 777.738673] env[69328]: DEBUG oslo_concurrency.lockutils [req-396bdac8-b386-4cf6-9c9e-0b0fd629faf1 req-2c1a1154-3e25-45ff-a8d5-2a9f58427e4f service nova] Lock "8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 777.738821] env[69328]: DEBUG nova.compute.manager [req-396bdac8-b386-4cf6-9c9e-0b0fd629faf1 req-2c1a1154-3e25-45ff-a8d5-2a9f58427e4f service nova] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] No waiting events found dispatching network-vif-plugged-33b9c9a0-82a8-4195-bb5a-d8e6e911ddc5 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 777.738985] env[69328]: WARNING nova.compute.manager [req-396bdac8-b386-4cf6-9c9e-0b0fd629faf1 req-2c1a1154-3e25-45ff-a8d5-2a9f58427e4f service nova] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Received unexpected event network-vif-plugged-33b9c9a0-82a8-4195-bb5a-d8e6e911ddc5 for instance with vm_state building and task_state spawning. [ 777.751325] env[69328]: DEBUG oslo_vmware.api [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Task: {'id': task-3273185, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.178447} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.751616] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 777.752425] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-722bce0e-c96a-46e6-b6d1-a058e276615e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.758014] env[69328]: DEBUG nova.compute.manager [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 777.768241] env[69328]: DEBUG oslo_concurrency.lockutils [None req-132ca4b8-4bae-4ac1-8d8c-ec0cac168316 tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.034s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 777.777391] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe/f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 777.778158] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.057s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 777.780309] env[69328]: INFO nova.compute.claims [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 777.782852] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-972c9019-f8bd-4d4a-99b6-24a224ebfc0c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.802315] env[69328]: DEBUG oslo_concurrency.lockutils [None req-18849c5c-7c4e-4186-8068-b389720e93a7 tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Lock "d724a141-35e7-4483-99aa-8a17066fb63b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 95.288s {{(pid=69328) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 777.824876] env[69328]: DEBUG oslo_vmware.api [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Waiting for the task: (returnval){ [ 777.824876] env[69328]: value = "task-3273186" [ 777.824876] env[69328]: _type = "Task" [ 777.824876] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.840816] env[69328]: DEBUG oslo_vmware.api [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Task: {'id': task-3273186, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.927691] env[69328]: DEBUG oslo_concurrency.lockutils [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Acquiring lock "refresh_cache-8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.927691] env[69328]: DEBUG oslo_concurrency.lockutils [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Acquired lock "refresh_cache-8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 777.927691] env[69328]: DEBUG nova.network.neutron [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 778.108842] env[69328]: DEBUG nova.network.neutron [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Successfully created port: 32db9785-1822-4acf-9971-06db92f35c18 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 778.114857] env[69328]: DEBUG nova.network.neutron [-] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.158104] env[69328]: DEBUG oslo_vmware.api [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Task: {'id': task-3273178, 'name': RelocateVM_Task} progress is 97%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.303358] env[69328]: DEBUG nova.compute.manager [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Starting instance... 
{{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 778.335584] env[69328]: DEBUG oslo_vmware.api [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Task: {'id': task-3273186, 'name': ReconfigVM_Task, 'duration_secs': 0.322726} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.335854] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Reconfigured VM instance instance-0000002d to attach disk [datastore1] f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe/f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 778.336830] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1549e697-ffcb-4beb-89e0-5154f1fb306e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.345378] env[69328]: DEBUG oslo_vmware.api [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Waiting for the task: (returnval){ [ 778.345378] env[69328]: value = "task-3273187" [ 778.345378] env[69328]: _type = "Task" [ 778.345378] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.357743] env[69328]: DEBUG oslo_vmware.api [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Task: {'id': task-3273187, 'name': Rename_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.511544] env[69328]: DEBUG nova.network.neutron [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 778.620790] env[69328]: INFO nova.compute.manager [-] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Took 1.51 seconds to deallocate network for instance. [ 778.654304] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 778.654419] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a319a816-7bce-4359-9642-b64d0150e49a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.659770] env[69328]: DEBUG oslo_vmware.api [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Task: {'id': task-3273178, 'name': RelocateVM_Task} progress is 97%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.666314] env[69328]: DEBUG oslo_vmware.api [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for the task: (returnval){ [ 778.666314] env[69328]: value = "task-3273188" [ 778.666314] env[69328]: _type = "Task" [ 778.666314] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.675147] env[69328]: DEBUG oslo_vmware.api [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3273188, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.785862] env[69328]: DEBUG nova.compute.manager [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 778.826835] env[69328]: DEBUG nova.virt.hardware [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:37:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='32e26fb7-ec83-4d85-ade8-a07c889bcc21',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-718846255',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 778.827896] env[69328]: DEBUG nova.virt.hardware [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 778.827896] env[69328]: DEBUG nova.virt.hardware [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 778.827896] env[69328]: DEBUG nova.virt.hardware [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 778.827896] env[69328]: DEBUG nova.virt.hardware [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Image pref 0:0:0 {{(pid=69328) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 778.828255] env[69328]: DEBUG nova.virt.hardware [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 778.828307] env[69328]: DEBUG nova.virt.hardware [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 778.828466] env[69328]: DEBUG nova.virt.hardware [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 778.828638] env[69328]: DEBUG nova.virt.hardware [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 778.828828] env[69328]: DEBUG nova.virt.hardware [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 778.829152] env[69328]: DEBUG nova.virt.hardware [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 778.830649] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b65643e-69a9-4ccd-99bc-83f463a4cd17 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.835179] env[69328]: DEBUG oslo_concurrency.lockutils [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 778.836200] env[69328]: DEBUG nova.network.neutron [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Updating instance_info_cache with network_info: [{"id": "33b9c9a0-82a8-4195-bb5a-d8e6e911ddc5", "address": "fa:16:3e:55:c6:67", "network": {"id": "45671a9d-5017-4d83-b871-b0f453a85414", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-44603226-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": 
"fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "79faf009f74c4bb59df1cc4c6b0dadd4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33b9c9a0-82", "ovs_interfaceid": "33b9c9a0-82a8-4195-bb5a-d8e6e911ddc5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.843306] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab4c3876-a91a-4e69-a003-e80c70db5bd4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.872533] env[69328]: DEBUG oslo_vmware.api [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Task: {'id': task-3273187, 'name': Rename_Task, 'duration_secs': 0.135846} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.873649] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 778.873950] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3d175b89-5812-4be8-b02f-02cde36125de {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.883313] env[69328]: DEBUG oslo_vmware.api [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Waiting for the task: (returnval){ [ 778.883313] env[69328]: value = "task-3273189" [ 778.883313] env[69328]: _type = "Task" [ 778.883313] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.893192] env[69328]: DEBUG oslo_vmware.api [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Task: {'id': task-3273189, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.128172] env[69328]: DEBUG oslo_concurrency.lockutils [None req-44f3ab11-d601-4dcc-8461-7aed79509981 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 779.161335] env[69328]: DEBUG oslo_vmware.api [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Task: {'id': task-3273178, 'name': RelocateVM_Task} progress is 98%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.179379] env[69328]: DEBUG oslo_vmware.api [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3273188, 'name': PowerOffVM_Task, 'duration_secs': 0.212172} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.179651] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 779.180492] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af02f4d0-58ea-45ef-ac47-b30d4f5dea23 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.205702] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddb4f629-8969-42ff-94d0-3c2ee0d5e1f1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.255320] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 779.256128] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8ed2a4b7-6a65-4bf6-ac7f-16547d3e555c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.265663] env[69328]: DEBUG oslo_vmware.api [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for the task: (returnval){ [ 779.265663] env[69328]: value = "task-3273190" [ 779.265663] env[69328]: _type = "Task" [ 779.265663] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.279390] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] VM already powered off {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 779.279602] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 779.279861] env[69328]: DEBUG oslo_concurrency.lockutils [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.280021] env[69328]: DEBUG oslo_concurrency.lockutils [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 779.280232] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 779.280511] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-49266942-7000-422a-b975-7179d71e57c7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.294317] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 779.294497] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 779.295270] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a74bd172-409a-4f14-a337-3214b40b8262 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.306125] env[69328]: DEBUG oslo_vmware.api [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for the task: (returnval){ [ 779.306125] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c5a8cb-4a08-61aa-c05f-82ddc000cda7" [ 779.306125] env[69328]: _type = "Task" [ 779.306125] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.319583] env[69328]: DEBUG oslo_vmware.api [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c5a8cb-4a08-61aa-c05f-82ddc000cda7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.344364] env[69328]: DEBUG oslo_concurrency.lockutils [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Releasing lock "refresh_cache-8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 779.344364] env[69328]: DEBUG nova.compute.manager [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Instance network_info: |[{"id": "33b9c9a0-82a8-4195-bb5a-d8e6e911ddc5", "address": "fa:16:3e:55:c6:67", "network": {"id": "45671a9d-5017-4d83-b871-b0f453a85414", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-44603226-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "79faf009f74c4bb59df1cc4c6b0dadd4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33b9c9a0-82", "ovs_interfaceid": "33b9c9a0-82a8-4195-bb5a-d8e6e911ddc5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 779.345765] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Instance VIF info [{'network_name': 
'br-int', 'mac_address': 'fa:16:3e:55:c6:67', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d88bb07-f93c-45ca-bce7-230cb1f33833', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '33b9c9a0-82a8-4195-bb5a-d8e6e911ddc5', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 779.354330] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Creating folder: Project (79faf009f74c4bb59df1cc4c6b0dadd4). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 779.354715] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3fee34db-8dfc-47b4-b1db-48264233362e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.370022] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Created folder: Project (79faf009f74c4bb59df1cc4c6b0dadd4) in parent group-v653649. [ 779.370022] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Creating folder: Instances. Parent ref: group-v653787. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 779.370022] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9f57ebeb-42ff-4f59-a6d6-c34b2ec8d4fe {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.380937] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Created folder: Instances in parent group-v653787. [ 779.382507] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 779.382507] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 779.382507] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-67ca4d04-e990-4d63-9aef-617db1c7b409 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.411942] env[69328]: DEBUG oslo_vmware.api [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Task: {'id': task-3273189, 'name': PowerOnVM_Task, 'duration_secs': 0.468652} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.414207] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 779.414207] env[69328]: INFO nova.compute.manager [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Took 6.44 seconds to spawn the instance on the hypervisor. [ 779.414207] env[69328]: DEBUG nova.compute.manager [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 779.414567] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 779.414567] env[69328]: value = "task-3273193" [ 779.414567] env[69328]: _type = "Task" [ 779.414567] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.416344] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8f430c0-2787-43d2-8a58-a474a4b1adee {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.422396] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0f278cf-8be7-41fd-9b26-d39332d1399a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.437016] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273193, 'name': CreateVM_Task} progress is 6%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.440540] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9c2dfca-2271-47dd-a038-d66a41b9e2f6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.482821] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae8bba56-1684-4d16-b337-7984009a8bdf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.493043] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ec8625c-0304-4ca4-a35c-72f4dd0ab5bf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.506640] env[69328]: DEBUG nova.compute.provider_tree [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 779.662312] env[69328]: DEBUG oslo_vmware.api [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Task: {'id': task-3273178, 'name': RelocateVM_Task, 'duration_secs': 4.655473} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.662312] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Volume attach. 
Driver type: vmdk {{(pid=69328) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 779.662312] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653727', 'volume_id': 'aa5050fe-f367-4822-9aa7-4bfac9106402', 'name': 'volume-aa5050fe-f367-4822-9aa7-4bfac9106402', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '18022645-9a2a-489e-b0b1-486165f46f14', 'attached_at': '', 'detached_at': '', 'volume_id': 'aa5050fe-f367-4822-9aa7-4bfac9106402', 'serial': 'aa5050fe-f367-4822-9aa7-4bfac9106402'} {{(pid=69328) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 779.664225] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20e72c7f-973f-44e1-bb66-5a7b14fdbd7d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.684696] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86ccd83c-6172-40ee-a48b-a5b8b241453d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.713740] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Reconfiguring VM instance instance-0000002c to attach disk [datastore1] volume-aa5050fe-f367-4822-9aa7-4bfac9106402/volume-aa5050fe-f367-4822-9aa7-4bfac9106402.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 779.715733] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4f7340bc-c021-4cf2-ae45-a18cb32d2de7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.738742] env[69328]: DEBUG oslo_vmware.api [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Waiting for the task: (returnval){ [ 779.738742] env[69328]: value = "task-3273194" [ 779.738742] env[69328]: _type = "Task" [ 779.738742] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.747818] env[69328]: DEBUG oslo_vmware.api [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Task: {'id': task-3273194, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.819977] env[69328]: DEBUG oslo_vmware.api [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c5a8cb-4a08-61aa-c05f-82ddc000cda7, 'name': SearchDatastore_Task, 'duration_secs': 0.014183} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.821125] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5601d8d-0f06-4829-91df-620aeadb2b94 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.827896] env[69328]: DEBUG oslo_vmware.api [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for the task: (returnval){ [ 779.827896] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e22da2-a682-f51e-49c2-070ea3d54389" [ 779.827896] env[69328]: _type = "Task" [ 779.827896] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.839803] env[69328]: DEBUG oslo_vmware.api [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e22da2-a682-f51e-49c2-070ea3d54389, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.927340] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273193, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.955093] env[69328]: INFO nova.compute.manager [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Took 47.21 seconds to build instance. [ 780.009592] env[69328]: DEBUG nova.scheduler.client.report [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 780.195756] env[69328]: DEBUG nova.compute.manager [req-b7f5daf7-b41f-4db2-9e6c-a1e53a399e7b req-56e11623-661f-44f7-be2f-ed93743f3dad service nova] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Received event network-changed-33b9c9a0-82a8-4195-bb5a-d8e6e911ddc5 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 780.195756] env[69328]: DEBUG nova.compute.manager [req-b7f5daf7-b41f-4db2-9e6c-a1e53a399e7b req-56e11623-661f-44f7-be2f-ed93743f3dad service nova] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Refreshing instance network info cache due to event network-changed-33b9c9a0-82a8-4195-bb5a-d8e6e911ddc5. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 780.195960] env[69328]: DEBUG oslo_concurrency.lockutils [req-b7f5daf7-b41f-4db2-9e6c-a1e53a399e7b req-56e11623-661f-44f7-be2f-ed93743f3dad service nova] Acquiring lock "refresh_cache-8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.196265] env[69328]: DEBUG oslo_concurrency.lockutils [req-b7f5daf7-b41f-4db2-9e6c-a1e53a399e7b req-56e11623-661f-44f7-be2f-ed93743f3dad service nova] Acquired lock "refresh_cache-8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 780.196483] env[69328]: DEBUG nova.network.neutron [req-b7f5daf7-b41f-4db2-9e6c-a1e53a399e7b req-56e11623-661f-44f7-be2f-ed93743f3dad service nova] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Refreshing network info cache for port 33b9c9a0-82a8-4195-bb5a-d8e6e911ddc5 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 780.250196] env[69328]: DEBUG oslo_vmware.api [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Task: {'id': task-3273194, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.341414] env[69328]: DEBUG oslo_vmware.api [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e22da2-a682-f51e-49c2-070ea3d54389, 'name': SearchDatastore_Task, 'duration_secs': 0.010287} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.341708] env[69328]: DEBUG oslo_concurrency.lockutils [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 780.341976] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 99e31dfd-5d41-4564-886f-becc25ca289c/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318-rescue.vmdk. {{(pid=69328) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 780.342265] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8ffa09e1-81c7-4e2b-b583-efeb273def3e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.349508] env[69328]: DEBUG oslo_vmware.api [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for the task: (returnval){ [ 780.349508] env[69328]: value = "task-3273195" [ 780.349508] env[69328]: _type = "Task" [ 780.349508] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.358163] env[69328]: DEBUG oslo_vmware.api [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3273195, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.366740] env[69328]: DEBUG nova.network.neutron [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Successfully updated port: 32db9785-1822-4acf-9971-06db92f35c18 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 780.431191] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273193, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.456958] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bfb4b988-2e0a-4aec-8b0f-73c313b55fbc tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Lock "f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.565s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 780.519959] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.742s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 780.520727] env[69328]: DEBUG nova.compute.manager [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 780.523722] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fd9effd8-506c-4bc3-8d83-942135c74d2b tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.928s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 780.523939] env[69328]: DEBUG nova.objects.instance [None req-fd9effd8-506c-4bc3-8d83-942135c74d2b tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Lazy-loading 'resources' on Instance uuid 732342ea-2f73-40ea-a826-883ddc7a385a {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 780.616788] env[69328]: DEBUG nova.compute.manager [req-f3cc6962-f218-4690-8d16-75947a65b7a1 req-cebfd917-752d-4f56-b8c9-0dc4a1640b5c service nova] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Received event network-vif-plugged-32db9785-1822-4acf-9971-06db92f35c18 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 780.617103] env[69328]: DEBUG oslo_concurrency.lockutils [req-f3cc6962-f218-4690-8d16-75947a65b7a1 req-cebfd917-752d-4f56-b8c9-0dc4a1640b5c service nova] Acquiring lock "25fb207b-9388-4198-bb48-ab7cebd43375-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 780.617250] env[69328]: DEBUG oslo_concurrency.lockutils [req-f3cc6962-f218-4690-8d16-75947a65b7a1 req-cebfd917-752d-4f56-b8c9-0dc4a1640b5c service nova] Lock "25fb207b-9388-4198-bb48-ab7cebd43375-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 780.617416] env[69328]: DEBUG oslo_concurrency.lockutils [req-f3cc6962-f218-4690-8d16-75947a65b7a1 req-cebfd917-752d-4f56-b8c9-0dc4a1640b5c service nova] Lock "25fb207b-9388-4198-bb48-ab7cebd43375-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 780.617582] env[69328]: DEBUG nova.compute.manager [req-f3cc6962-f218-4690-8d16-75947a65b7a1 req-cebfd917-752d-4f56-b8c9-0dc4a1640b5c service nova] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] No waiting events found dispatching network-vif-plugged-32db9785-1822-4acf-9971-06db92f35c18 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 780.617746] env[69328]: WARNING nova.compute.manager [req-f3cc6962-f218-4690-8d16-75947a65b7a1 req-cebfd917-752d-4f56-b8c9-0dc4a1640b5c service nova] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Received unexpected event network-vif-plugged-32db9785-1822-4acf-9971-06db92f35c18 for instance with vm_state building and task_state spawning. [ 780.753593] env[69328]: DEBUG oslo_vmware.api [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Task: {'id': task-3273194, 'name': ReconfigVM_Task, 'duration_secs': 0.689801} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.754023] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Reconfigured VM instance instance-0000002c to attach disk [datastore1] volume-aa5050fe-f367-4822-9aa7-4bfac9106402/volume-aa5050fe-f367-4822-9aa7-4bfac9106402.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 780.758643] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-031a49a6-d5dd-496e-b149-70a9edc79ca4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.775558] env[69328]: DEBUG oslo_vmware.api [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Waiting for the task: (returnval){ [ 780.775558] env[69328]: value = "task-3273196" [ 780.775558] env[69328]: _type = "Task" [ 780.775558] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.790095] env[69328]: DEBUG oslo_vmware.api [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Task: {'id': task-3273196, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.866040] env[69328]: DEBUG oslo_vmware.api [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3273195, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.871368] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquiring lock "refresh_cache-25fb207b-9388-4198-bb48-ab7cebd43375" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.871625] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquired lock "refresh_cache-25fb207b-9388-4198-bb48-ab7cebd43375" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 780.871879] env[69328]: DEBUG nova.network.neutron [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 780.932969] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273193, 'name': CreateVM_Task, 'duration_secs': 1.065102} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.933552] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 780.934388] env[69328]: DEBUG oslo_concurrency.lockutils [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.935199] env[69328]: DEBUG oslo_concurrency.lockutils [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 780.935617] env[69328]: DEBUG oslo_concurrency.lockutils [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 780.935932] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84abbd54-9d55-4091-bcbc-cc6291503435 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.941852] env[69328]: DEBUG oslo_vmware.api [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Waiting for the task: (returnval){ [ 780.941852] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52be3f0e-4ca6-b75e-9e45-970663d34f7b" [ 780.941852] env[69328]: _type = "Task" [ 780.941852] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.952831] env[69328]: DEBUG oslo_vmware.api [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52be3f0e-4ca6-b75e-9e45-970663d34f7b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.957153] env[69328]: DEBUG nova.network.neutron [req-b7f5daf7-b41f-4db2-9e6c-a1e53a399e7b req-56e11623-661f-44f7-be2f-ed93743f3dad service nova] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Updated VIF entry in instance network info cache for port 33b9c9a0-82a8-4195-bb5a-d8e6e911ddc5. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 780.957153] env[69328]: DEBUG nova.network.neutron [req-b7f5daf7-b41f-4db2-9e6c-a1e53a399e7b req-56e11623-661f-44f7-be2f-ed93743f3dad service nova] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Updating instance_info_cache with network_info: [{"id": "33b9c9a0-82a8-4195-bb5a-d8e6e911ddc5", "address": "fa:16:3e:55:c6:67", "network": {"id": "45671a9d-5017-4d83-b871-b0f453a85414", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-44603226-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "79faf009f74c4bb59df1cc4c6b0dadd4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33b9c9a0-82", "ovs_interfaceid": "33b9c9a0-82a8-4195-bb5a-d8e6e911ddc5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.962117] env[69328]: DEBUG nova.compute.manager [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 781.026347] env[69328]: DEBUG nova.compute.utils [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 781.030502] env[69328]: DEBUG nova.compute.manager [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 781.030684] env[69328]: DEBUG nova.network.neutron [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 781.089230] env[69328]: DEBUG nova.policy [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'adea8f3e148442e691f99af03f894a4c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1467d48a61f7410b8f6d5a981d169563', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 781.286128] env[69328]: DEBUG oslo_vmware.api [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Task: {'id': task-3273196, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.349868] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5c10a2ba-3a2a-429c-9b99-15e12f9449ea tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Acquiring lock "d724a141-35e7-4483-99aa-8a17066fb63b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 781.351284] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5c10a2ba-3a2a-429c-9b99-15e12f9449ea tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Lock "d724a141-35e7-4483-99aa-8a17066fb63b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 781.351633] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5c10a2ba-3a2a-429c-9b99-15e12f9449ea tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Acquiring lock "d724a141-35e7-4483-99aa-8a17066fb63b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 781.351944] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5c10a2ba-3a2a-429c-9b99-15e12f9449ea tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Lock "d724a141-35e7-4483-99aa-8a17066fb63b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 781.352245] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5c10a2ba-3a2a-429c-9b99-15e12f9449ea 
tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Lock "d724a141-35e7-4483-99aa-8a17066fb63b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 781.354423] env[69328]: INFO nova.compute.manager [None req-5c10a2ba-3a2a-429c-9b99-15e12f9449ea tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Terminating instance [ 781.365899] env[69328]: DEBUG oslo_vmware.api [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3273195, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.577325} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.366273] env[69328]: INFO nova.virt.vmwareapi.ds_util [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 99e31dfd-5d41-4564-886f-becc25ca289c/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318-rescue.vmdk. [ 781.367041] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23ed0de0-abe4-40ce-ac66-3038330a572b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.403385] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Reconfiguring VM instance instance-0000002a to attach disk [datastore2] 99e31dfd-5d41-4564-886f-becc25ca289c/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318-rescue.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 781.406240] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-44751bf7-9c12-4084-8b85-fbf53a983280 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.426498] env[69328]: DEBUG oslo_vmware.api [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for the task: (returnval){ [ 781.426498] env[69328]: value = "task-3273197" [ 781.426498] env[69328]: _type = "Task" [ 781.426498] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.438306] env[69328]: DEBUG oslo_vmware.api [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3273197, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.451220] env[69328]: DEBUG oslo_vmware.api [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52be3f0e-4ca6-b75e-9e45-970663d34f7b, 'name': SearchDatastore_Task, 'duration_secs': 0.031993} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.451581] env[69328]: DEBUG oslo_concurrency.lockutils [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 781.451854] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 781.452171] env[69328]: DEBUG oslo_concurrency.lockutils [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 781.452354] env[69328]: DEBUG oslo_concurrency.lockutils [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 781.452606] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 781.453042] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-70b7db70-00d6-4ee4-9d67-6c6f87e15aef {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.460133] env[69328]: DEBUG nova.network.neutron [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 781.462430] env[69328]: DEBUG oslo_concurrency.lockutils [req-b7f5daf7-b41f-4db2-9e6c-a1e53a399e7b req-56e11623-661f-44f7-be2f-ed93743f3dad service nova] Releasing lock "refresh_cache-8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 781.462975] env[69328]: DEBUG nova.compute.manager [req-b7f5daf7-b41f-4db2-9e6c-a1e53a399e7b req-56e11623-661f-44f7-be2f-ed93743f3dad service nova] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Received event network-vif-deleted-6a14c441-36e5-4670-8f21-54b1113b23ff {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 781.474590] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 781.474590] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 781.475064] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe584f0c-ec6d-43b0-a64c-90318196c9d9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.484777] env[69328]: DEBUG oslo_vmware.api [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Waiting for the task: (returnval){ [ 781.484777] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]526f6527-234b-baa8-1fa2-1d603eb05f4e" [ 781.484777] env[69328]: _type = "Task" [ 781.484777] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.491443] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 781.491945] env[69328]: DEBUG nova.compute.manager [None req-73a0efe5-4703-45e0-b6b4-5e88cb572f24 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 781.496803] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-776ec5ec-a9ce-4b3c-becb-c81d73f01c8f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.499469] env[69328]: DEBUG oslo_vmware.api [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]526f6527-234b-baa8-1fa2-1d603eb05f4e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.535478] env[69328]: DEBUG nova.compute.utils [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 781.543428] env[69328]: DEBUG nova.network.neutron [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Successfully created port: 4ffb2723-2cb7-4f04-8e1b-208a6329288e {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 781.584543] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc7ca193-aec8-4a7d-afa3-0a744d5dd7ac {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.592627] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaa500e5-6611-49d7-9277-be5fb6de7c7d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.630148] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aebf9c42-1f51-4a79-a140-2bac4636ab46 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.637274] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-973f1e87-1094-4ca8-9407-dd3e75d1bed4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.650926] env[69328]: DEBUG nova.compute.provider_tree [None req-fd9effd8-506c-4bc3-8d83-942135c74d2b tempest-ServerShowV257Test-890624642 
tempest-ServerShowV257Test-890624642-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 781.736405] env[69328]: DEBUG nova.network.neutron [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Updating instance_info_cache with network_info: [{"id": "32db9785-1822-4acf-9971-06db92f35c18", "address": "fa:16:3e:3e:aa:b8", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.119", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32db9785-18", "ovs_interfaceid": "32db9785-1822-4acf-9971-06db92f35c18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.789276] env[69328]: DEBUG oslo_vmware.api [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Task: {'id': task-3273196, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.861517] env[69328]: DEBUG nova.compute.manager [None req-5c10a2ba-3a2a-429c-9b99-15e12f9449ea tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 781.861750] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5c10a2ba-3a2a-429c-9b99-15e12f9449ea tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 781.862768] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c34a6a62-1393-41bf-8b7a-a9448622d3ff {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.871209] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c10a2ba-3a2a-429c-9b99-15e12f9449ea tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 781.871515] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a2edb2c4-9541-4051-82ba-069811421205 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.878498] env[69328]: DEBUG oslo_vmware.api [None req-5c10a2ba-3a2a-429c-9b99-15e12f9449ea tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Waiting for the task: (returnval){ [ 781.878498] env[69328]: value = "task-3273198" [ 781.878498] env[69328]: _type = "Task" [ 781.878498] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.888764] env[69328]: DEBUG oslo_vmware.api [None req-5c10a2ba-3a2a-429c-9b99-15e12f9449ea tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Task: {'id': task-3273198, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.937842] env[69328]: DEBUG oslo_vmware.api [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3273197, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.996795] env[69328]: DEBUG oslo_vmware.api [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]526f6527-234b-baa8-1fa2-1d603eb05f4e, 'name': SearchDatastore_Task, 'duration_secs': 0.014569} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.997840] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c8bb525-ce02-4b7f-bd0b-f3603d9a8cc6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.004453] env[69328]: DEBUG oslo_vmware.api [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Waiting for the task: (returnval){ [ 782.004453] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52350427-e234-0b0d-6214-f5afe4ff2862" [ 782.004453] env[69328]: _type = "Task" [ 782.004453] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.014897] env[69328]: INFO nova.compute.manager [None req-73a0efe5-4703-45e0-b6b4-5e88cb572f24 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] instance snapshotting [ 782.019471] env[69328]: DEBUG nova.objects.instance [None req-73a0efe5-4703-45e0-b6b4-5e88cb572f24 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Lazy-loading 'flavor' on Instance uuid f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 782.022810] env[69328]: DEBUG oslo_vmware.api [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52350427-e234-0b0d-6214-f5afe4ff2862, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.043707] env[69328]: DEBUG nova.compute.manager [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Start building block device mappings for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 782.154430] env[69328]: DEBUG nova.scheduler.client.report [None req-fd9effd8-506c-4bc3-8d83-942135c74d2b tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 782.239265] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Releasing lock "refresh_cache-25fb207b-9388-4198-bb48-ab7cebd43375" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 782.239550] env[69328]: DEBUG nova.compute.manager [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Instance network_info: |[{"id": "32db9785-1822-4acf-9971-06db92f35c18", "address": "fa:16:3e:3e:aa:b8", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.119", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32db9785-18", "ovs_interfaceid": "32db9785-1822-4acf-9971-06db92f35c18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 782.240081] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3e:aa:b8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a8b99a46-3e7f-4ef1-9e45-58e6cd17f210', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '32db9785-1822-4acf-9971-06db92f35c18', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 782.250160] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] 
Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 782.250884] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 782.251199] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ccf14f74-3fd1-48fb-8cb9-61178ed07f19 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.277090] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 782.277090] env[69328]: value = "task-3273199" [ 782.277090] env[69328]: _type = "Task" [ 782.277090] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.288651] env[69328]: DEBUG oslo_vmware.api [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Task: {'id': task-3273196, 'name': ReconfigVM_Task, 'duration_secs': 1.193453} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.291569] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653727', 'volume_id': 'aa5050fe-f367-4822-9aa7-4bfac9106402', 'name': 'volume-aa5050fe-f367-4822-9aa7-4bfac9106402', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '18022645-9a2a-489e-b0b1-486165f46f14', 'attached_at': '', 'detached_at': '', 'volume_id': 'aa5050fe-f367-4822-9aa7-4bfac9106402', 'serial': 'aa5050fe-f367-4822-9aa7-4bfac9106402'} {{(pid=69328) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 782.292012] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273199, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.293743] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-757a3e94-a115-424b-b17c-41bd386f5fb4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.299345] env[69328]: DEBUG oslo_vmware.api [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Waiting for the task: (returnval){ [ 782.299345] env[69328]: value = "task-3273200" [ 782.299345] env[69328]: _type = "Task" [ 782.299345] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.309168] env[69328]: DEBUG oslo_vmware.api [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Task: {'id': task-3273200, 'name': Rename_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.388802] env[69328]: DEBUG oslo_vmware.api [None req-5c10a2ba-3a2a-429c-9b99-15e12f9449ea tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Task: {'id': task-3273198, 'name': PowerOffVM_Task, 'duration_secs': 0.244173} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.389131] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c10a2ba-3a2a-429c-9b99-15e12f9449ea tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 782.389307] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5c10a2ba-3a2a-429c-9b99-15e12f9449ea tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 782.389560] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6dcf02e8-b69d-4567-90ba-021bac094ed6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.437250] env[69328]: DEBUG oslo_vmware.api [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3273197, 'name': ReconfigVM_Task, 'duration_secs': 0.548192} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.437581] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Reconfigured VM instance instance-0000002a to attach disk [datastore2] 99e31dfd-5d41-4564-886f-becc25ca289c/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318-rescue.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 782.441630] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48fc0a81-2c92-4552-a3f0-951e0eff7800 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.477507] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f7bc2c6b-6a67-4879-a5b4-7769cd43b041 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.489229] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5c10a2ba-3a2a-429c-9b99-15e12f9449ea tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 782.489508] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5c10a2ba-3a2a-429c-9b99-15e12f9449ea tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Deleting contents 
of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 782.492169] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c10a2ba-3a2a-429c-9b99-15e12f9449ea tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Deleting the datastore file [datastore1] d724a141-35e7-4483-99aa-8a17066fb63b {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 782.492169] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e075ca1d-7f95-4c53-a548-faaad41074cc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.497417] env[69328]: DEBUG oslo_vmware.api [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for the task: (returnval){ [ 782.497417] env[69328]: value = "task-3273202" [ 782.497417] env[69328]: _type = "Task" [ 782.497417] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.500033] env[69328]: DEBUG oslo_vmware.api [None req-5c10a2ba-3a2a-429c-9b99-15e12f9449ea tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Waiting for the task: (returnval){ [ 782.500033] env[69328]: value = "task-3273203" [ 782.500033] env[69328]: _type = "Task" [ 782.500033] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.514608] env[69328]: DEBUG oslo_vmware.api [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3273202, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.519731] env[69328]: DEBUG oslo_vmware.api [None req-5c10a2ba-3a2a-429c-9b99-15e12f9449ea tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Task: {'id': task-3273203, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.524707] env[69328]: DEBUG oslo_vmware.api [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52350427-e234-0b0d-6214-f5afe4ff2862, 'name': SearchDatastore_Task, 'duration_secs': 0.012391} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.526986] env[69328]: DEBUG oslo_concurrency.lockutils [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 782.527879] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4/8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 782.527879] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a2ab6121-aee3-428d-8464-74f04fdbad7a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.530391] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdb4fdf9-8ff5-4a84-a56a-c47f893eaced {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.556233] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c01f7286-e87c-4b39-8983-049025738861 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.558781] env[69328]: DEBUG oslo_concurrency.lockutils [None req-da255cd1-6657-45d4-a96b-d26ccab796c8 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Acquiring lock "f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 782.559134] env[69328]: DEBUG oslo_concurrency.lockutils [None req-da255cd1-6657-45d4-a96b-d26ccab796c8 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Lock "f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 782.566101] env[69328]: DEBUG oslo_concurrency.lockutils [None req-da255cd1-6657-45d4-a96b-d26ccab796c8 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Acquiring lock "f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 782.566101] env[69328]: DEBUG oslo_concurrency.lockutils [None req-da255cd1-6657-45d4-a96b-d26ccab796c8 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Lock "f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.005s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 782.566101] env[69328]: DEBUG oslo_concurrency.lockutils [None req-da255cd1-6657-45d4-a96b-d26ccab796c8 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Lock "f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 782.566336] env[69328]: DEBUG oslo_vmware.api [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Waiting for the task: (returnval){ [ 782.566336] env[69328]: value = "task-3273204" [ 782.566336] env[69328]: _type = "Task" [ 782.566336] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.571380] env[69328]: INFO nova.compute.manager [None req-da255cd1-6657-45d4-a96b-d26ccab796c8 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Terminating instance [ 782.586458] env[69328]: DEBUG oslo_vmware.api [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Task: {'id': task-3273204, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.660679] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fd9effd8-506c-4bc3-8d83-942135c74d2b tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.137s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 782.663170] env[69328]: DEBUG oslo_concurrency.lockutils [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.602s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 782.664828] env[69328]: INFO nova.compute.claims [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 782.688464] env[69328]: INFO nova.scheduler.client.report [None req-fd9effd8-506c-4bc3-8d83-942135c74d2b tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Deleted allocations for instance 732342ea-2f73-40ea-a826-883ddc7a385a [ 782.789640] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273199, 'name': CreateVM_Task, 'duration_secs': 0.380582} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.789948] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 782.790522] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.790693] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 782.791022] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 782.791312] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e6c2e08-f513-40d3-8986-7cdabaf900c0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.797683] env[69328]: DEBUG oslo_vmware.api [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Waiting for the task: (returnval){ [ 782.797683] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52106ca7-0c0f-5b59-18d3-2b4f17e58932" [ 782.797683] env[69328]: _type = "Task" [ 782.797683] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.805981] env[69328]: DEBUG oslo_vmware.api [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52106ca7-0c0f-5b59-18d3-2b4f17e58932, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.811411] env[69328]: DEBUG oslo_vmware.api [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Task: {'id': task-3273200, 'name': Rename_Task, 'duration_secs': 0.181971} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.811411] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 782.811411] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-232e5b9d-8ed3-4281-9cc4-60767c5ccdbe {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.817084] env[69328]: DEBUG oslo_vmware.api [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Waiting for the task: (returnval){ [ 782.817084] env[69328]: value = "task-3273205" [ 782.817084] env[69328]: _type = "Task" [ 782.817084] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.824441] env[69328]: DEBUG oslo_vmware.api [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Task: {'id': task-3273205, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.015354] env[69328]: DEBUG oslo_vmware.api [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3273202, 'name': ReconfigVM_Task, 'duration_secs': 0.178692} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.018975] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 783.019396] env[69328]: DEBUG oslo_vmware.api [None req-5c10a2ba-3a2a-429c-9b99-15e12f9449ea tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Task: {'id': task-3273203, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.218679} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.019625] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1bfd35ef-52f3-4589-8810-f8d6b875ca2e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.021754] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c10a2ba-3a2a-429c-9b99-15e12f9449ea tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 783.021754] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5c10a2ba-3a2a-429c-9b99-15e12f9449ea tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 783.021754] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5c10a2ba-3a2a-429c-9b99-15e12f9449ea tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 783.021951] env[69328]: INFO nova.compute.manager [None req-5c10a2ba-3a2a-429c-9b99-15e12f9449ea tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Took 1.16 seconds to destroy the instance on the hypervisor. [ 783.022154] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5c10a2ba-3a2a-429c-9b99-15e12f9449ea tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 783.022368] env[69328]: DEBUG nova.compute.manager [-] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 783.022461] env[69328]: DEBUG nova.network.neutron [-] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 783.031047] env[69328]: DEBUG oslo_vmware.api [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for the task: (returnval){ [ 783.031047] env[69328]: value = "task-3273206" [ 783.031047] env[69328]: _type = "Task" [ 783.031047] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.040639] env[69328]: DEBUG oslo_vmware.api [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3273206, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.069200] env[69328]: DEBUG nova.compute.manager [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 783.079619] env[69328]: DEBUG nova.compute.manager [None req-73a0efe5-4703-45e0-b6b4-5e88cb572f24 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Instance disappeared during snapshot {{(pid=69328) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 783.081986] env[69328]: DEBUG oslo_concurrency.lockutils [None req-da255cd1-6657-45d4-a96b-d26ccab796c8 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Acquiring lock "refresh_cache-f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.082260] env[69328]: DEBUG oslo_concurrency.lockutils [None req-da255cd1-6657-45d4-a96b-d26ccab796c8 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Acquired lock "refresh_cache-f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 783.082453] env[69328]: DEBUG nova.network.neutron [None req-da255cd1-6657-45d4-a96b-d26ccab796c8 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 783.089471] env[69328]: DEBUG oslo_vmware.api [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Task: {'id': task-3273204, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.092552] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "36f6aab5-2774-402b-9db6-9912f2d5d473" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 783.092758] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "36f6aab5-2774-402b-9db6-9912f2d5d473" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 783.107400] env[69328]: DEBUG nova.virt.hardware [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='639762584',id=24,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-732788996',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 783.107681] env[69328]: DEBUG nova.virt.hardware [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 783.107848] env[69328]: DEBUG nova.virt.hardware [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 783.109072] env[69328]: DEBUG nova.virt.hardware [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 783.109320] env[69328]: DEBUG nova.virt.hardware [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 783.109507] env[69328]: DEBUG nova.virt.hardware 
[None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 783.110572] env[69328]: DEBUG nova.virt.hardware [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 783.110572] env[69328]: DEBUG nova.virt.hardware [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 783.110572] env[69328]: DEBUG nova.virt.hardware [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 783.110572] env[69328]: DEBUG nova.virt.hardware [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 783.110778] env[69328]: DEBUG nova.virt.hardware [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 783.113912] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddde2160-6ffe-4699-92b1-01ee13863fa2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.127809] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3800774-2a5a-48d4-a912-09c6b601a027 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.197093] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fd9effd8-506c-4bc3-8d83-942135c74d2b tempest-ServerShowV257Test-890624642 tempest-ServerShowV257Test-890624642-project-member] Lock "732342ea-2f73-40ea-a826-883ddc7a385a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.048s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 783.241429] env[69328]: DEBUG nova.compute.manager [req-5e904407-2a11-4667-9a63-db590c6587fb req-0293dd6d-eb3c-4bce-8d29-6fd55b9772f6 service nova] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Received event network-changed-32db9785-1822-4acf-9971-06db92f35c18 {{(pid=69328) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11737}} [ 783.241429] env[69328]: DEBUG nova.compute.manager [req-5e904407-2a11-4667-9a63-db590c6587fb req-0293dd6d-eb3c-4bce-8d29-6fd55b9772f6 service nova] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Refreshing instance network info cache due to event network-changed-32db9785-1822-4acf-9971-06db92f35c18. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 783.241718] env[69328]: DEBUG oslo_concurrency.lockutils [req-5e904407-2a11-4667-9a63-db590c6587fb req-0293dd6d-eb3c-4bce-8d29-6fd55b9772f6 service nova] Acquiring lock "refresh_cache-25fb207b-9388-4198-bb48-ab7cebd43375" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.242864] env[69328]: DEBUG oslo_concurrency.lockutils [req-5e904407-2a11-4667-9a63-db590c6587fb req-0293dd6d-eb3c-4bce-8d29-6fd55b9772f6 service nova] Acquired lock "refresh_cache-25fb207b-9388-4198-bb48-ab7cebd43375" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 783.242864] env[69328]: DEBUG nova.network.neutron [req-5e904407-2a11-4667-9a63-db590c6587fb req-0293dd6d-eb3c-4bce-8d29-6fd55b9772f6 service nova] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Refreshing network info cache for port 32db9785-1822-4acf-9971-06db92f35c18 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 783.259379] env[69328]: DEBUG nova.compute.manager [None req-73a0efe5-4703-45e0-b6b4-5e88cb572f24 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Found 0 images (rotation: 2) {{(pid=69328) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 783.308154] env[69328]: DEBUG oslo_vmware.api [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52106ca7-0c0f-5b59-18d3-2b4f17e58932, 'name': SearchDatastore_Task, 'duration_secs': 0.014386} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.308706] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 783.309085] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 783.309468] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.310542] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 783.310542] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 783.310793] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6dcc3dca-40ce-4989-92c3-d05fb1be5244 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.329333] env[69328]: DEBUG oslo_vmware.api [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Task: {'id': task-3273205, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.329844] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 783.331254] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 783.331254] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87a9b3c6-c038-4ed7-beb9-741a35f5e7e7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.334022] env[69328]: DEBUG nova.network.neutron [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Successfully updated port: 4ffb2723-2cb7-4f04-8e1b-208a6329288e {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 783.338230] env[69328]: DEBUG oslo_vmware.api [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Waiting for the task: (returnval){ [ 783.338230] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5201f123-e9cd-0949-77ed-0d8fdfa3d27b" [ 783.338230] env[69328]: _type = "Task" [ 783.338230] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.351789] env[69328]: DEBUG oslo_vmware.api [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5201f123-e9cd-0949-77ed-0d8fdfa3d27b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.540498] env[69328]: DEBUG oslo_vmware.api [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3273206, 'name': PowerOnVM_Task, 'duration_secs': 0.437688} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.540769] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 783.543858] env[69328]: DEBUG nova.compute.manager [None req-03d92c24-0877-49f9-afeb-4a9a12526153 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 783.544456] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a938333-6244-4950-8730-626c42af7bfe {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.580913] env[69328]: DEBUG oslo_vmware.api [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Task: {'id': task-3273204, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.622828} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.581293] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4/8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 783.581510] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 783.581772] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4ea0aabc-11a4-4e21-9d4d-8dc403dd8e9f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.588815] env[69328]: DEBUG oslo_vmware.api [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Waiting for the task: (returnval){ [ 783.588815] env[69328]: value = "task-3273207" [ 783.588815] env[69328]: _type = "Task" [ 783.588815] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.599017] env[69328]: DEBUG oslo_vmware.api [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Task: {'id': task-3273207, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.619578] env[69328]: DEBUG nova.network.neutron [None req-da255cd1-6657-45d4-a96b-d26ccab796c8 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 783.667016] env[69328]: DEBUG nova.compute.manager [req-a1322cd7-c389-47fb-ac6d-94b1e70a0642 req-2bf9bce9-f894-469c-8206-947234c53c06 service nova] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Received event network-vif-deleted-d7451c82-01e6-4e9f-bfbc-e873dbf7896a {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 783.667170] env[69328]: INFO nova.compute.manager [req-a1322cd7-c389-47fb-ac6d-94b1e70a0642 req-2bf9bce9-f894-469c-8206-947234c53c06 service nova] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Neutron deleted interface d7451c82-01e6-4e9f-bfbc-e873dbf7896a; detaching it from the instance and deleting it from the info cache [ 783.667344] env[69328]: DEBUG nova.network.neutron [req-a1322cd7-c389-47fb-ac6d-94b1e70a0642 req-2bf9bce9-f894-469c-8206-947234c53c06 service nova] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.677071] env[69328]: DEBUG nova.network.neutron [None req-da255cd1-6657-45d4-a96b-d26ccab796c8 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.830345] env[69328]: DEBUG oslo_vmware.api [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Task: {'id': task-3273205, 'name': PowerOnVM_Task, 'duration_secs': 0.599109} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.834326] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 783.834326] env[69328]: INFO nova.compute.manager [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Took 12.25 seconds to spawn the instance on the hypervisor. 
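The recurring "Invoking <Method>_Task", "Waiting for the task", "progress is N%" and "completed successfully" entries above (CreateVM_Task, Rename_Task, PowerOnVM_Task, CopyVirtualDisk_Task) are the oslo.vmware asynchronous task pattern. The following is an illustrative sketch only, not taken from this log: the vCenter endpoint, credentials and VM reference are placeholders, and obtaining a managed-object reference is out of scope here.

```python
# Illustrative sketch, assuming placeholder vCenter credentials and a VM
# managed-object reference obtained elsewhere; not code from this log.
from oslo_vmware import api as vmware_api


def power_on(session, vm_ref):
    # invoke_api() issues the SOAP call (here PowerOnVM_Task) and returns a
    # task reference such as "task-3273205"; wait_for_task() then polls the
    # task every task_poll_interval seconds -- the source of the periodic
    # "progress is N%" lines -- and returns the task info once the task
    # reaches the 'success' state, or raises if it fails.
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    return session.wait_for_task(task_ref)


if __name__ == '__main__':
    session = vmware_api.VMwareAPISession(
        'vc.example.org',               # placeholder vCenter host
        'administrator@vsphere.local',  # placeholder username
        'secret',                       # placeholder password
        api_retry_count=10,
        task_poll_interval=0.5,
    )
    # A real caller would look up vm_ref via a PropertyCollector query and
    # then call power_on(session, vm_ref); the log's vm_util helpers such as
    # power_on_instance and create_vm wrap this same invoke/wait pattern.
```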
[ 783.834326] env[69328]: DEBUG nova.compute.manager [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 783.834326] env[69328]: DEBUG nova.network.neutron [-] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.835546] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-761b1c03-adca-4980-aaa1-0607d47089af {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.841723] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Acquiring lock "refresh_cache-3b4b6687-fb6d-4bb7-8604-20a3ba706ff3" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.841723] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Acquired lock "refresh_cache-3b4b6687-fb6d-4bb7-8604-20a3ba706ff3" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 783.841723] env[69328]: DEBUG nova.network.neutron [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 783.863333] env[69328]: DEBUG oslo_vmware.api [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5201f123-e9cd-0949-77ed-0d8fdfa3d27b, 'name': SearchDatastore_Task, 'duration_secs': 0.015201} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.864349] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f232bf2-ca8d-4b7a-878e-f97059793456 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.871567] env[69328]: DEBUG oslo_vmware.api [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Waiting for the task: (returnval){ [ 783.871567] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52cc45fa-4693-714f-18cf-7473e525e86b" [ 783.871567] env[69328]: _type = "Task" [ 783.871567] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.882036] env[69328]: DEBUG oslo_vmware.api [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52cc45fa-4693-714f-18cf-7473e525e86b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.052272] env[69328]: DEBUG nova.network.neutron [req-5e904407-2a11-4667-9a63-db590c6587fb req-0293dd6d-eb3c-4bce-8d29-6fd55b9772f6 service nova] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Updated VIF entry in instance network info cache for port 32db9785-1822-4acf-9971-06db92f35c18. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 784.052671] env[69328]: DEBUG nova.network.neutron [req-5e904407-2a11-4667-9a63-db590c6587fb req-0293dd6d-eb3c-4bce-8d29-6fd55b9772f6 service nova] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Updating instance_info_cache with network_info: [{"id": "32db9785-1822-4acf-9971-06db92f35c18", "address": "fa:16:3e:3e:aa:b8", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.119", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32db9785-18", "ovs_interfaceid": "32db9785-1822-4acf-9971-06db92f35c18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.098700] env[69328]: DEBUG oslo_vmware.api [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Task: {'id': task-3273207, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.248795} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.098817] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 784.100190] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b92fc86b-9e48-42d6-bef3-336f4d4d064c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.128009] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Reconfiguring VM instance instance-0000002e to attach disk [datastore1] 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4/8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 784.131589] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0176301e-8c04-4f07-a378-54beeb5b5dd5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.154772] env[69328]: DEBUG oslo_vmware.api [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Waiting for the task: (returnval){ [ 784.154772] env[69328]: value = "task-3273208" [ 784.154772] env[69328]: _type = "Task" [ 784.154772] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.167169] env[69328]: DEBUG oslo_vmware.api [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Task: {'id': task-3273208, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.169882] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-93b31620-8196-416d-b1af-885519bd40f7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.179409] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e57613e3-0798-4cd3-9674-26540f5b1846 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.194479] env[69328]: DEBUG oslo_concurrency.lockutils [None req-da255cd1-6657-45d4-a96b-d26ccab796c8 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Releasing lock "refresh_cache-f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 784.194479] env[69328]: DEBUG nova.compute.manager [None req-da255cd1-6657-45d4-a96b-d26ccab796c8 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 784.194479] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-da255cd1-6657-45d4-a96b-d26ccab796c8 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 784.200508] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0dda0f0-223d-4aa5-837f-9783d6b56d27 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.204984] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-da255cd1-6657-45d4-a96b-d26ccab796c8 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 784.205142] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-997b8846-5934-4d9b-ae06-273ed2e898a9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.214123] env[69328]: DEBUG oslo_vmware.api [None req-da255cd1-6657-45d4-a96b-d26ccab796c8 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Waiting for the task: (returnval){ [ 784.214123] env[69328]: value = "task-3273209" [ 784.214123] env[69328]: _type = "Task" [ 784.214123] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.224495] env[69328]: DEBUG nova.compute.manager [req-a1322cd7-c389-47fb-ac6d-94b1e70a0642 req-2bf9bce9-f894-469c-8206-947234c53c06 service nova] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Detach interface failed, port_id=d7451c82-01e6-4e9f-bfbc-e873dbf7896a, reason: Instance d724a141-35e7-4483-99aa-8a17066fb63b could not be found. 
{{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 784.239077] env[69328]: DEBUG oslo_vmware.api [None req-da255cd1-6657-45d4-a96b-d26ccab796c8 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Task: {'id': task-3273209, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.245981] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5b0be4d-ab1d-4649-acee-662e7b7283b6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.253676] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d59425ea-7d89-4ca7-b7ac-9353479cb011 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.286033] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6fc9c25-d06f-4025-a0d9-aff3fa84b531 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.294366] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-650492dc-9868-475e-b8f8-4e9839e975f3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.309668] env[69328]: DEBUG nova.compute.provider_tree [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 784.338865] env[69328]: INFO nova.compute.manager [-] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Took 1.32 seconds to deallocate network for instance. [ 784.365950] env[69328]: INFO nova.compute.manager [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Took 53.50 seconds to build instance. [ 784.384019] env[69328]: DEBUG oslo_vmware.api [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52cc45fa-4693-714f-18cf-7473e525e86b, 'name': SearchDatastore_Task, 'duration_secs': 0.08496} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.385044] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 784.385331] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 25fb207b-9388-4198-bb48-ab7cebd43375/25fb207b-9388-4198-bb48-ab7cebd43375.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 784.385650] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2fba712c-d41d-4d37-86d3-e7aa9fbc625a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.392942] env[69328]: DEBUG oslo_vmware.api [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Waiting for the task: (returnval){ [ 784.392942] env[69328]: value = "task-3273210" [ 784.392942] env[69328]: _type = "Task" [ 784.392942] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.401967] env[69328]: DEBUG oslo_vmware.api [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273210, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.410118] env[69328]: DEBUG nova.network.neutron [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 784.562411] env[69328]: DEBUG oslo_concurrency.lockutils [req-5e904407-2a11-4667-9a63-db590c6587fb req-0293dd6d-eb3c-4bce-8d29-6fd55b9772f6 service nova] Releasing lock "refresh_cache-25fb207b-9388-4198-bb48-ab7cebd43375" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 784.667974] env[69328]: DEBUG oslo_vmware.api [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Task: {'id': task-3273208, 'name': ReconfigVM_Task, 'duration_secs': 0.368723} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.672130] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Reconfigured VM instance instance-0000002e to attach disk [datastore1] 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4/8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 784.672130] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-17d0154a-0eef-4fa7-b0ef-c3157438f8c7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.677273] env[69328]: DEBUG oslo_vmware.api [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Waiting for the task: (returnval){ [ 784.677273] env[69328]: value = "task-3273211" [ 784.677273] env[69328]: _type = "Task" [ 784.677273] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.686787] env[69328]: DEBUG oslo_vmware.api [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Task: {'id': task-3273211, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.734227] env[69328]: DEBUG oslo_vmware.api [None req-da255cd1-6657-45d4-a96b-d26ccab796c8 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Task: {'id': task-3273209, 'name': PowerOffVM_Task, 'duration_secs': 0.131567} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.735043] env[69328]: DEBUG nova.network.neutron [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Updating instance_info_cache with network_info: [{"id": "4ffb2723-2cb7-4f04-8e1b-208a6329288e", "address": "fa:16:3e:ad:66:bb", "network": {"id": "023046e8-e113-4ce9-95d7-1c04fc034ba6", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-89845670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1467d48a61f7410b8f6d5a981d169563", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ffb2723-2c", "ovs_interfaceid": "4ffb2723-2cb7-4f04-8e1b-208a6329288e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.736460] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-da255cd1-6657-45d4-a96b-d26ccab796c8 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 784.738096] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-da255cd1-6657-45d4-a96b-d26ccab796c8 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 784.741756] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ac135efd-4e6e-47e9-9943-770393e7966c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.769027] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-da255cd1-6657-45d4-a96b-d26ccab796c8 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 784.769353] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-da255cd1-6657-45d4-a96b-d26ccab796c8 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 784.769553] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-da255cd1-6657-45d4-a96b-d26ccab796c8 
tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Deleting the datastore file [datastore1] f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 784.769828] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d38fe9a1-83c2-4d50-b583-fe7840a536c8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.778513] env[69328]: DEBUG oslo_vmware.api [None req-da255cd1-6657-45d4-a96b-d26ccab796c8 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Waiting for the task: (returnval){ [ 784.778513] env[69328]: value = "task-3273213" [ 784.778513] env[69328]: _type = "Task" [ 784.778513] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.787893] env[69328]: DEBUG oslo_vmware.api [None req-da255cd1-6657-45d4-a96b-d26ccab796c8 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Task: {'id': task-3273213, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.813211] env[69328]: DEBUG nova.scheduler.client.report [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 784.849977] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5c10a2ba-3a2a-429c-9b99-15e12f9449ea tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 784.869095] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6226e1ef-81ec-4aac-800a-e8883a0d9364 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Lock "18022645-9a2a-489e-b0b1-486165f46f14" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 100.627s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 784.915495] env[69328]: DEBUG oslo_vmware.api [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273210, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.480729} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.915495] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 25fb207b-9388-4198-bb48-ab7cebd43375/25fb207b-9388-4198-bb48-ab7cebd43375.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 784.915495] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 784.915495] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-196349de-a323-4bb4-be14-bc49a94975d5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.922661] env[69328]: DEBUG oslo_vmware.api [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Waiting for the task: (returnval){ [ 784.922661] env[69328]: value = "task-3273214" [ 784.922661] env[69328]: _type = "Task" [ 784.922661] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.932255] env[69328]: DEBUG oslo_vmware.api [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273214, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.194551] env[69328]: DEBUG oslo_vmware.api [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Task: {'id': task-3273211, 'name': Rename_Task, 'duration_secs': 0.284254} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.195074] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 785.195549] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f9248940-6f9d-4839-b197-96b05aa9ddf3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.205179] env[69328]: DEBUG oslo_vmware.api [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Waiting for the task: (returnval){ [ 785.205179] env[69328]: value = "task-3273215" [ 785.205179] env[69328]: _type = "Task" [ 785.205179] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.215518] env[69328]: DEBUG oslo_vmware.api [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Task: {'id': task-3273215, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.243191] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Releasing lock "refresh_cache-3b4b6687-fb6d-4bb7-8604-20a3ba706ff3" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 785.243191] env[69328]: DEBUG nova.compute.manager [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Instance network_info: |[{"id": "4ffb2723-2cb7-4f04-8e1b-208a6329288e", "address": "fa:16:3e:ad:66:bb", "network": {"id": "023046e8-e113-4ce9-95d7-1c04fc034ba6", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-89845670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1467d48a61f7410b8f6d5a981d169563", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ffb2723-2c", "ovs_interfaceid": "4ffb2723-2cb7-4f04-8e1b-208a6329288e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 785.243416] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ad:66:bb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f6d1427-d86b-4371-9172-50e4bb0eb1cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4ffb2723-2cb7-4f04-8e1b-208a6329288e', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 785.251043] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 785.251307] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 785.251944] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-37fba82a-6a7e-4c03-93d9-8128fcbd8bbf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.271790] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 785.271790] env[69328]: value = "task-3273216" [ 785.271790] env[69328]: _type = "Task" [ 785.271790] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.279607] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273216, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.287340] env[69328]: DEBUG oslo_vmware.api [None req-da255cd1-6657-45d4-a96b-d26ccab796c8 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Task: {'id': task-3273213, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.193048} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.287594] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-da255cd1-6657-45d4-a96b-d26ccab796c8 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 785.287777] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-da255cd1-6657-45d4-a96b-d26ccab796c8 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 785.288136] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-da255cd1-6657-45d4-a96b-d26ccab796c8 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 785.288357] env[69328]: INFO nova.compute.manager [None req-da255cd1-6657-45d4-a96b-d26ccab796c8 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Took 1.09 seconds to destroy the instance on the hypervisor. [ 785.288598] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-da255cd1-6657-45d4-a96b-d26ccab796c8 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 785.288790] env[69328]: DEBUG nova.compute.manager [-] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 785.288883] env[69328]: DEBUG nova.network.neutron [-] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 785.311815] env[69328]: DEBUG nova.network.neutron [-] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 785.319361] env[69328]: DEBUG oslo_concurrency.lockutils [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.656s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 785.322019] env[69328]: DEBUG nova.compute.manager [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 785.322496] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.198s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 785.322817] env[69328]: DEBUG nova.objects.instance [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Lazy-loading 'resources' on Instance uuid 3923403b-2e8f-4033-89ee-9a907aff1d49 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 785.372476] env[69328]: DEBUG nova.compute.manager [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 785.432600] env[69328]: DEBUG oslo_vmware.api [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273214, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086864} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.432727] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 785.433515] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb17b79a-eac6-48a5-9356-a9f37654a151 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.462682] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] 25fb207b-9388-4198-bb48-ab7cebd43375/25fb207b-9388-4198-bb48-ab7cebd43375.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 785.462682] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ce462384-4a7a-46b4-bef4-48196bc66985 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.488022] env[69328]: DEBUG oslo_vmware.api [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Waiting for the task: (returnval){ [ 785.488022] env[69328]: value = "task-3273217" [ 785.488022] env[69328]: _type = "Task" [ 785.488022] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.494636] env[69328]: DEBUG oslo_vmware.api [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273217, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.587161] env[69328]: DEBUG nova.compute.manager [req-dd50197b-155c-49ed-a0e9-1ff810796511 req-e7022537-4499-44ad-b1cb-2c322d07009a service nova] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Received event network-vif-plugged-4ffb2723-2cb7-4f04-8e1b-208a6329288e {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 785.587442] env[69328]: DEBUG oslo_concurrency.lockutils [req-dd50197b-155c-49ed-a0e9-1ff810796511 req-e7022537-4499-44ad-b1cb-2c322d07009a service nova] Acquiring lock "3b4b6687-fb6d-4bb7-8604-20a3ba706ff3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 785.587697] env[69328]: DEBUG oslo_concurrency.lockutils [req-dd50197b-155c-49ed-a0e9-1ff810796511 req-e7022537-4499-44ad-b1cb-2c322d07009a service nova] Lock "3b4b6687-fb6d-4bb7-8604-20a3ba706ff3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 785.587907] env[69328]: DEBUG oslo_concurrency.lockutils [req-dd50197b-155c-49ed-a0e9-1ff810796511 req-e7022537-4499-44ad-b1cb-2c322d07009a service nova] Lock "3b4b6687-fb6d-4bb7-8604-20a3ba706ff3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 785.588267] env[69328]: DEBUG nova.compute.manager [req-dd50197b-155c-49ed-a0e9-1ff810796511 req-e7022537-4499-44ad-b1cb-2c322d07009a service nova] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] No waiting events found dispatching network-vif-plugged-4ffb2723-2cb7-4f04-8e1b-208a6329288e {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 785.588642] env[69328]: WARNING nova.compute.manager [req-dd50197b-155c-49ed-a0e9-1ff810796511 req-e7022537-4499-44ad-b1cb-2c322d07009a service nova] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Received unexpected event network-vif-plugged-4ffb2723-2cb7-4f04-8e1b-208a6329288e for instance with vm_state building and task_state spawning. [ 785.588719] env[69328]: DEBUG nova.compute.manager [req-dd50197b-155c-49ed-a0e9-1ff810796511 req-e7022537-4499-44ad-b1cb-2c322d07009a service nova] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Received event network-changed-4ffb2723-2cb7-4f04-8e1b-208a6329288e {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 785.588913] env[69328]: DEBUG nova.compute.manager [req-dd50197b-155c-49ed-a0e9-1ff810796511 req-e7022537-4499-44ad-b1cb-2c322d07009a service nova] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Refreshing instance network info cache due to event network-changed-4ffb2723-2cb7-4f04-8e1b-208a6329288e. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 785.589179] env[69328]: DEBUG oslo_concurrency.lockutils [req-dd50197b-155c-49ed-a0e9-1ff810796511 req-e7022537-4499-44ad-b1cb-2c322d07009a service nova] Acquiring lock "refresh_cache-3b4b6687-fb6d-4bb7-8604-20a3ba706ff3" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.589409] env[69328]: DEBUG oslo_concurrency.lockutils [req-dd50197b-155c-49ed-a0e9-1ff810796511 req-e7022537-4499-44ad-b1cb-2c322d07009a service nova] Acquired lock "refresh_cache-3b4b6687-fb6d-4bb7-8604-20a3ba706ff3" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 785.589555] env[69328]: DEBUG nova.network.neutron [req-dd50197b-155c-49ed-a0e9-1ff810796511 req-e7022537-4499-44ad-b1cb-2c322d07009a service nova] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Refreshing network info cache for port 4ffb2723-2cb7-4f04-8e1b-208a6329288e {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 785.713050] env[69328]: DEBUG oslo_vmware.api [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Task: {'id': task-3273215, 'name': PowerOnVM_Task, 'duration_secs': 0.474139} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.713355] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 785.713604] env[69328]: INFO nova.compute.manager [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Took 9.75 seconds to spawn the instance on the hypervisor. [ 785.713789] env[69328]: DEBUG nova.compute.manager [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 785.714755] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17da6514-8e89-437b-9ae0-eea4dffecbb0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.790272] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273216, 'name': CreateVM_Task, 'duration_secs': 0.417651} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.790272] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 785.791253] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.792021] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 785.793194] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 785.793877] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d2f3c1a-221b-4612-9f88-fb890b004064 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.800207] env[69328]: DEBUG oslo_vmware.api [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Waiting for the task: (returnval){ [ 785.800207] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e6f902-dd46-ab32-2c02-5b0198883d54" [ 785.800207] env[69328]: _type = "Task" [ 785.800207] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.810675] env[69328]: DEBUG oslo_vmware.api [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e6f902-dd46-ab32-2c02-5b0198883d54, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.815813] env[69328]: DEBUG nova.network.neutron [-] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 785.827416] env[69328]: DEBUG nova.compute.utils [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 785.831872] env[69328]: DEBUG nova.compute.manager [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 785.832460] env[69328]: DEBUG nova.network.neutron [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 785.895977] env[69328]: DEBUG nova.policy [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1685bb9a09d84a7a92306c64f0e5895e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '75d5853e3c724d02bacfa75173e38ab3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 785.898370] env[69328]: DEBUG oslo_concurrency.lockutils [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 786.000110] env[69328]: DEBUG oslo_vmware.api [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273217, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.233278] env[69328]: INFO nova.compute.manager [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Took 48.38 seconds to build instance. 
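The entries above repeat the same oslo.vmware task pattern for every long-running vSphere call in this window (PowerOffVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, Rename_Task, CreateVM_Task, PowerOnVM_Task): the SOAP invocation returns a task handle (value = "task-32732xx", _type = "Task"), the driver waits on it, a "progress is N%" line is logged at each poll, and the entry ends with "completed successfully" plus a measured duration_secs. The sketch below is only an illustration of that poll loop under stated assumptions; read_task_info and POLL_INTERVAL are hypothetical stand-ins for the property-collector read and poll interval that oslo.vmware actually uses, not the library's real internals.

# Illustrative sketch of a generic task poll loop (assumed helpers, not oslo.vmware code).
import time

POLL_INTERVAL = 0.5  # seconds between polls (assumed value)


class TaskFailed(Exception):
    """Raised when the polled task reports an error state."""


def wait_for_task(task_ref, read_task_info):
    """Poll task_ref until it reaches a terminal state; return the elapsed time.

    read_task_info(task_ref) is assumed to return (state, progress, error),
    standing in for the RetrievePropertiesEx reads seen in the log above.
    """
    started = time.monotonic()
    while True:
        state, progress, error = read_task_info(task_ref)
        print("Task: %s progress is %s%%" % (task_ref, progress))
        if state == "success":
            return time.monotonic() - started  # analogous to duration_secs in the log
        if state == "error":
            raise TaskFailed(error)
        time.sleep(POLL_INTERVAL)

In the log this loop is visible as the repeated "progress is N%" entries followed by "completed successfully" with the reported duration_secs.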
[ 786.282751] env[69328]: INFO nova.compute.manager [None req-4d7312a4-9582-48df-bf50-43069609f258 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Unrescuing [ 786.283249] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4d7312a4-9582-48df-bf50-43069609f258 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Acquiring lock "refresh_cache-99e31dfd-5d41-4564-886f-becc25ca289c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.283249] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4d7312a4-9582-48df-bf50-43069609f258 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Acquired lock "refresh_cache-99e31dfd-5d41-4564-886f-becc25ca289c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 786.283421] env[69328]: DEBUG nova.network.neutron [None req-4d7312a4-9582-48df-bf50-43069609f258 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 786.317103] env[69328]: DEBUG oslo_vmware.api [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e6f902-dd46-ab32-2c02-5b0198883d54, 'name': SearchDatastore_Task, 'duration_secs': 0.01352} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.318021] env[69328]: INFO nova.compute.manager [-] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Took 1.03 seconds to deallocate network for instance. 
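The "Acquiring lock ... by ...", "acquired ... waited N.NNNs", and "released ... held N.NNNs" entries surrounding these operations come from oslo.concurrency lock wrappers: Nova serializes access to shared state such as "compute_resources", the image-cache directory paths, and per-instance "refresh_cache-<uuid>" keys, and the waited/held times (for example "waited 28.198s" and "held 2.656s" earlier in this section) are measured around the critical section. A rough sketch of that usage pattern follows; the guarded functions and data here are illustrative examples, not Nova's actual code, and only assume the documented lockutils decorator and context manager.

# Rough sketch of the lock pattern behind the "Acquiring lock" / "released" entries.
# update_usage_example and refresh_instance_cache are hypothetical helpers; the lock
# names mirror those seen in the log.
from oslo_concurrency import lockutils


@lockutils.synchronized("compute_resources")  # in-process semaphore-backed lock
def update_usage_example(tracker, instance_uuid):
    # Critical section: time spent here is what shows up as ":: held N.NNNs".
    tracker.setdefault("usage", {})[instance_uuid] = "claimed"


def refresh_instance_cache(instance_uuid, refresh):
    # Context-manager form, as used around "refresh_cache-<uuid>" in the entries above.
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        return refresh(instance_uuid)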
[ 786.318021] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 786.318021] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 786.318244] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.319048] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 786.319048] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 786.320176] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3e8eab13-915f-4bd8-a227-a9d6617573a8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.329714] env[69328]: DEBUG nova.compute.manager [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 786.345261] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 786.345375] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 786.346119] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd3b36a4-81c6-45cd-9325-689a1815350e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.357803] env[69328]: DEBUG oslo_vmware.api [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Waiting for the task: (returnval){ [ 786.357803] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5294e5ca-342b-5112-41fe-82ef4eef1780" [ 786.357803] env[69328]: _type = "Task" [ 786.357803] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.370986] env[69328]: DEBUG oslo_vmware.api [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5294e5ca-342b-5112-41fe-82ef4eef1780, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.372321] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7f1387d-586a-4b34-b297-89858eb28436 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.380463] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32070db7-848a-46e3-bd17-90ff6a46af06 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.417095] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e3394c8-5bd7-45cd-95af-043192e097ee {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.425869] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d050376-8a60-46ab-9eb6-fd827cbef9d4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.441414] env[69328]: DEBUG nova.compute.provider_tree [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 786.498888] env[69328]: DEBUG oslo_vmware.api [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273217, 'name': ReconfigVM_Task, 'duration_secs': 0.537008} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.499423] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Reconfigured VM instance instance-0000002f to attach disk [datastore1] 25fb207b-9388-4198-bb48-ab7cebd43375/25fb207b-9388-4198-bb48-ab7cebd43375.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 786.500401] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9ca4580a-6b9b-48e5-867c-9bec570be312 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.506792] env[69328]: DEBUG oslo_vmware.api [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Waiting for the task: (returnval){ [ 786.506792] env[69328]: value = "task-3273218" [ 786.506792] env[69328]: _type = "Task" [ 786.506792] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.516090] env[69328]: DEBUG oslo_vmware.api [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273218, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.546398] env[69328]: DEBUG nova.network.neutron [req-dd50197b-155c-49ed-a0e9-1ff810796511 req-e7022537-4499-44ad-b1cb-2c322d07009a service nova] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Updated VIF entry in instance network info cache for port 4ffb2723-2cb7-4f04-8e1b-208a6329288e. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 786.546794] env[69328]: DEBUG nova.network.neutron [req-dd50197b-155c-49ed-a0e9-1ff810796511 req-e7022537-4499-44ad-b1cb-2c322d07009a service nova] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Updating instance_info_cache with network_info: [{"id": "4ffb2723-2cb7-4f04-8e1b-208a6329288e", "address": "fa:16:3e:ad:66:bb", "network": {"id": "023046e8-e113-4ce9-95d7-1c04fc034ba6", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-89845670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1467d48a61f7410b8f6d5a981d169563", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ffb2723-2c", "ovs_interfaceid": "4ffb2723-2cb7-4f04-8e1b-208a6329288e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.744028] env[69328]: DEBUG oslo_concurrency.lockutils [None req-39068645-5f70-40c8-a65d-bf526736e910 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Lock "8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 89.559s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 786.820534] env[69328]: DEBUG nova.network.neutron [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Successfully created port: 7e9163b1-a349-4287-bbfe-8147dc2e52dd {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 786.835734] env[69328]: DEBUG oslo_concurrency.lockutils [None req-da255cd1-6657-45d4-a96b-d26ccab796c8 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 786.876239] env[69328]: DEBUG oslo_vmware.api [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5294e5ca-342b-5112-41fe-82ef4eef1780, 'name': SearchDatastore_Task, 'duration_secs': 0.023545} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.876239] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75da15c0-a676-40f0-9fc0-b417f5f55615 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.880306] env[69328]: DEBUG oslo_vmware.api [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Waiting for the task: (returnval){ [ 786.880306] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e4133b-be6a-4a01-1f44-9e99827f06c6" [ 786.880306] env[69328]: _type = "Task" [ 786.880306] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.889102] env[69328]: DEBUG oslo_vmware.api [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e4133b-be6a-4a01-1f44-9e99827f06c6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.971995] env[69328]: ERROR nova.scheduler.client.report [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [req-52f47827-34d6-4070-94e4-da5f44f37939] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-52f47827-34d6-4070-94e4-da5f44f37939"}]} [ 786.993978] env[69328]: DEBUG nova.scheduler.client.report [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Refreshing inventories for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 787.007884] env[69328]: DEBUG nova.scheduler.client.report [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Updating ProviderTree inventory for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 787.007884] env[69328]: DEBUG nova.compute.provider_tree [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 787.021171] env[69328]: DEBUG oslo_vmware.api [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273218, 'name': Rename_Task, 'duration_secs': 0.371056} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.025857] env[69328]: DEBUG nova.scheduler.client.report [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Refreshing aggregate associations for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e, aggregates: None {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 787.027878] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 787.028460] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1dedc231-b9cf-4256-b939-22aa95c5892a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.035321] env[69328]: DEBUG oslo_vmware.api [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Waiting for the task: (returnval){ [ 787.035321] env[69328]: value = "task-3273219" [ 787.035321] env[69328]: _type = "Task" [ 787.035321] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.046815] env[69328]: DEBUG oslo_vmware.api [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273219, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.054018] env[69328]: DEBUG oslo_concurrency.lockutils [req-dd50197b-155c-49ed-a0e9-1ff810796511 req-e7022537-4499-44ad-b1cb-2c322d07009a service nova] Releasing lock "refresh_cache-3b4b6687-fb6d-4bb7-8604-20a3ba706ff3" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 787.054018] env[69328]: DEBUG nova.scheduler.client.report [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Refreshing trait associations for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 787.214304] env[69328]: DEBUG nova.network.neutron [None req-4d7312a4-9582-48df-bf50-43069609f258 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Updating instance_info_cache with network_info: [{"id": "90f7115d-cbd5-42dd-a07a-5eb45deb5276", "address": "fa:16:3e:22:47:ec", "network": {"id": "032910e6-4d2e-415b-ac3e-ed7a7fadf536", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1432969230-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "690511a8725a4dd6ab796a15569293a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "be5c038c-29e5-43c9-91ab-9eb3094b5337", "external-id": "nsx-vlan-transportzone-511", "segmentation_id": 511, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90f7115d-cb", "ovs_interfaceid": "90f7115d-cbd5-42dd-a07a-5eb45deb5276", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.246937] env[69328]: DEBUG nova.compute.manager [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 787.342678] env[69328]: DEBUG nova.compute.manager [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 787.376157] env[69328]: DEBUG nova.virt.hardware [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 787.376724] env[69328]: DEBUG nova.virt.hardware [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 787.377105] env[69328]: DEBUG nova.virt.hardware [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 787.377419] env[69328]: DEBUG nova.virt.hardware [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 787.377679] env[69328]: DEBUG nova.virt.hardware [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 787.378014] env[69328]: DEBUG nova.virt.hardware [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 787.378403] env[69328]: DEBUG nova.virt.hardware [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 787.378701] env[69328]: DEBUG nova.virt.hardware [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 787.378983] env[69328]: DEBUG 
nova.virt.hardware [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 787.379288] env[69328]: DEBUG nova.virt.hardware [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 787.379575] env[69328]: DEBUG nova.virt.hardware [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 787.380590] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6c39971-685f-420d-9f8b-e1833b4aea3b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.400185] env[69328]: DEBUG oslo_vmware.api [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e4133b-be6a-4a01-1f44-9e99827f06c6, 'name': SearchDatastore_Task, 'duration_secs': 0.025023} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.400692] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 787.401089] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3/3b4b6687-fb6d-4bb7-8604-20a3ba706ff3.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 787.402757] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6424baf6-0726-40ee-b339-a510c3786119 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.409710] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7e9805ff-55aa-46d9-bec2-8afdb475f41c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.426139] env[69328]: DEBUG oslo_vmware.api [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Waiting for 
the task: (returnval){ [ 787.426139] env[69328]: value = "task-3273220" [ 787.426139] env[69328]: _type = "Task" [ 787.426139] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.441777] env[69328]: DEBUG oslo_vmware.api [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': task-3273220, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.547222] env[69328]: DEBUG oslo_vmware.api [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273219, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.578997] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4113d5d4-3a9f-4ef0-aa8a-099b65560547 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.588045] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54d9ce37-2a15-4981-b3aa-e470af00240a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.630079] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb92be46-81d7-4ced-91ae-989f2f1d2707 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.639598] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a0afefc-53b5-4ef2-9290-4990dc24a8fe {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.655917] env[69328]: DEBUG nova.compute.provider_tree [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 787.660379] env[69328]: DEBUG nova.compute.manager [req-9eb0647b-9818-4f83-892e-45aa7eec8f62 req-30311066-fa28-4808-8e48-8181f7c04b8b service nova] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Received event network-changed-fbe60697-372d-45c9-97c0-49ce01cbc064 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 787.660379] env[69328]: DEBUG nova.compute.manager [req-9eb0647b-9818-4f83-892e-45aa7eec8f62 req-30311066-fa28-4808-8e48-8181f7c04b8b service nova] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Refreshing instance network info cache due to event network-changed-fbe60697-372d-45c9-97c0-49ce01cbc064. 
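
Annotation: the repeated "Waiting for the task" / "progress is 0% ... 66%" entries come from polling a vCenter task until it finishes. The sketch below shows that poll-until-done pattern in isolation; the names are illustrative and this is not oslo.vmware's actual implementation, just the shape of the loop behind wait_for_task/_poll_task.

import time

def wait_for_task(poll_fn, task_id, interval=0.5, timeout=300.0):
    """Poll poll_fn(task_id) -> (state, progress) until success, error or timeout."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress = poll_fn(task_id)
        if state == 'success':
            return
        if state == 'error':
            raise RuntimeError(f'task {task_id} failed')
        print(f"Task {task_id} progress is {progress}%.")
        time.sleep(interval)
    raise TimeoutError(f'task {task_id} not done within {timeout}s')

# Toy poller that mimics the 0% -> 66% -> done progression logged above.
_progress = iter([0, 66, 100])
def fake_poll(task_id):
    p = next(_progress)
    return ('success' if p == 100 else 'running'), p

wait_for_task(fake_poll, 'task-3273219', interval=0.01)
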
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 787.660501] env[69328]: DEBUG oslo_concurrency.lockutils [req-9eb0647b-9818-4f83-892e-45aa7eec8f62 req-30311066-fa28-4808-8e48-8181f7c04b8b service nova] Acquiring lock "refresh_cache-18022645-9a2a-489e-b0b1-486165f46f14" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.660562] env[69328]: DEBUG oslo_concurrency.lockutils [req-9eb0647b-9818-4f83-892e-45aa7eec8f62 req-30311066-fa28-4808-8e48-8181f7c04b8b service nova] Acquired lock "refresh_cache-18022645-9a2a-489e-b0b1-486165f46f14" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 787.661933] env[69328]: DEBUG nova.network.neutron [req-9eb0647b-9818-4f83-892e-45aa7eec8f62 req-30311066-fa28-4808-8e48-8181f7c04b8b service nova] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Refreshing network info cache for port fbe60697-372d-45c9-97c0-49ce01cbc064 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 787.716326] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4d7312a4-9582-48df-bf50-43069609f258 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Releasing lock "refresh_cache-99e31dfd-5d41-4564-886f-becc25ca289c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 787.717199] env[69328]: DEBUG nova.objects.instance [None req-4d7312a4-9582-48df-bf50-43069609f258 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Lazy-loading 'flavor' on Instance uuid 99e31dfd-5d41-4564-886f-becc25ca289c {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 787.783579] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 787.941009] env[69328]: DEBUG oslo_vmware.api [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': task-3273220, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.047259] env[69328]: DEBUG oslo_vmware.api [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273219, 'name': PowerOnVM_Task, 'duration_secs': 0.750765} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.047600] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 788.047926] env[69328]: INFO nova.compute.manager [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Took 9.26 seconds to spawn the instance on the hypervisor. [ 788.048197] env[69328]: DEBUG nova.compute.manager [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 788.049107] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5adf113-ab0e-4c66-9c09-b01dfe23f8fa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.204977] env[69328]: DEBUG nova.scheduler.client.report [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Updated inventory for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with generation 74 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 788.205283] env[69328]: DEBUG nova.compute.provider_tree [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Updating resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e generation from 74 to 75 during operation: update_inventory {{(pid=69328) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 788.205469] env[69328]: DEBUG nova.compute.provider_tree [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 788.226017] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb1fd752-28ed-4568-b564-18044f46f8a9 {{(pid=69328) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.254528] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d7312a4-9582-48df-bf50-43069609f258 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 788.258553] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-deabf17a-5f2e-44b8-b7fe-5452d2d83024 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.267919] env[69328]: DEBUG oslo_vmware.api [None req-4d7312a4-9582-48df-bf50-43069609f258 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for the task: (returnval){ [ 788.267919] env[69328]: value = "task-3273221" [ 788.267919] env[69328]: _type = "Task" [ 788.267919] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.276463] env[69328]: DEBUG oslo_vmware.api [None req-4d7312a4-9582-48df-bf50-43069609f258 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3273221, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.441259] env[69328]: DEBUG oslo_vmware.api [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': task-3273220, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.537622} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.441578] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3/3b4b6687-fb6d-4bb7-8604-20a3ba706ff3.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 788.441793] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 788.442092] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-619bcfb0-9ab3-42e1-8a18-7b3a50172559 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.451248] env[69328]: DEBUG oslo_vmware.api [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Waiting for the task: (returnval){ [ 788.451248] env[69328]: value = "task-3273222" [ 788.451248] env[69328]: _type = "Task" [ 788.451248] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.455846] env[69328]: DEBUG nova.network.neutron [req-9eb0647b-9818-4f83-892e-45aa7eec8f62 req-30311066-fa28-4808-8e48-8181f7c04b8b service nova] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Updated VIF entry in instance network info cache for port fbe60697-372d-45c9-97c0-49ce01cbc064. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 788.456292] env[69328]: DEBUG nova.network.neutron [req-9eb0647b-9818-4f83-892e-45aa7eec8f62 req-30311066-fa28-4808-8e48-8181f7c04b8b service nova] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Updating instance_info_cache with network_info: [{"id": "fbe60697-372d-45c9-97c0-49ce01cbc064", "address": "fa:16:3e:e7:a0:1b", "network": {"id": "f29114eb-6d33-4dd9-8c6c-f817e73e3761", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1738105219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0ef63e916e324066a8feacfe8a4b6358", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7654928b-7afe-42e3-a18d-68ecc775cefe", "external-id": "cl2-zone-807", "segmentation_id": 807, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbe60697-37", "ovs_interfaceid": "fbe60697-372d-45c9-97c0-49ce01cbc064", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.464383] env[69328]: DEBUG oslo_vmware.api [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': task-3273222, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.570915] env[69328]: INFO nova.compute.manager [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Took 43.43 seconds to build instance. 
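
Annotation: the instance_info_cache entries above are lists of VIF dicts carrying the port id, MAC, network, subnets, fixed and floating IPs and OVS binding details. A small sketch that pulls the commonly needed fields out of one such structure; the sample is a trimmed copy of the cached entry for port fbe60697 shown just above, and the helper name is made up for illustration.

def summarize_vif(vif):
    """Flatten one network_info VIF dict into the fields usually needed."""
    fixed, floating = [], []
    for subnet in vif['network']['subnets']:
        for ip in subnet['ips']:
            fixed.append(ip['address'])
            floating.extend(f['address'] for f in ip.get('floating_ips', []))
    return {
        'port_id': vif['id'],
        'mac': vif['address'],
        'bridge': vif['network']['bridge'],
        'segmentation_id': vif['details'].get('segmentation_id'),
        'devname': vif.get('devname'),
        'fixed_ips': fixed,
        'floating_ips': floating,
    }

vif = {  # trimmed from the cached entry for port fbe60697-372d-45c9-97c0-49ce01cbc064
    'id': 'fbe60697-372d-45c9-97c0-49ce01cbc064',
    'address': 'fa:16:3e:e7:a0:1b',
    'devname': 'tapfbe60697-37',
    'details': {'segmentation_id': 807},
    'network': {
        'bridge': 'br-int',
        'subnets': [{'ips': [{'address': '192.168.128.11',
                              'floating_ips': [{'address': '10.180.180.158'}]}]}],
    },
}
print(summarize_vif(vif))
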
[ 788.710887] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.388s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 788.712924] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.148s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 788.715868] env[69328]: INFO nova.compute.claims [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 788.740858] env[69328]: INFO nova.scheduler.client.report [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Deleted allocations for instance 3923403b-2e8f-4033-89ee-9a907aff1d49 [ 788.780810] env[69328]: DEBUG oslo_vmware.api [None req-4d7312a4-9582-48df-bf50-43069609f258 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3273221, 'name': PowerOffVM_Task, 'duration_secs': 0.222477} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.781119] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d7312a4-9582-48df-bf50-43069609f258 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 788.786767] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d7312a4-9582-48df-bf50-43069609f258 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Reconfiguring VM instance instance-0000002a to detach disk 2001 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 788.787364] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-61a692e0-7bfa-42a2-9f2b-4dd3fb8df807 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.809685] env[69328]: DEBUG oslo_vmware.api [None req-4d7312a4-9582-48df-bf50-43069609f258 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for the task: (returnval){ [ 788.809685] env[69328]: value = "task-3273223" [ 788.809685] env[69328]: _type = "Task" [ 788.809685] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.822589] env[69328]: DEBUG oslo_vmware.api [None req-4d7312a4-9582-48df-bf50-43069609f258 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3273223, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.867785] env[69328]: DEBUG nova.network.neutron [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Successfully updated port: 7e9163b1-a349-4287-bbfe-8147dc2e52dd {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 788.959869] env[69328]: DEBUG oslo_concurrency.lockutils [req-9eb0647b-9818-4f83-892e-45aa7eec8f62 req-30311066-fa28-4808-8e48-8181f7c04b8b service nova] Releasing lock "refresh_cache-18022645-9a2a-489e-b0b1-486165f46f14" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 788.963465] env[69328]: DEBUG oslo_vmware.api [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': task-3273222, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08007} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.964095] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 788.965157] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b163180-3ad3-4fa0-a206-68f157008bc5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.990787] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3/3b4b6687-fb6d-4bb7-8604-20a3ba706ff3.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 788.992032] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-13273e02-a069-4efd-b588-43a821813247 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.012044] env[69328]: DEBUG oslo_vmware.api [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Waiting for the task: (returnval){ [ 789.012044] env[69328]: value = "task-3273224" [ 789.012044] env[69328]: _type = "Task" [ 789.012044] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.021699] env[69328]: DEBUG oslo_vmware.api [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': task-3273224, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.074990] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f435ad6c-4dc5-4187-b2e6-05c874d8c156 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Lock "25fb207b-9388-4198-bb48-ab7cebd43375" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.429s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 789.252091] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7ff24e32-9bdb-4e6c-85cb-d89873012504 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Lock "3923403b-2e8f-4033-89ee-9a907aff1d49" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.889s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 789.319813] env[69328]: DEBUG oslo_vmware.api [None req-4d7312a4-9582-48df-bf50-43069609f258 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3273223, 'name': ReconfigVM_Task, 'duration_secs': 0.256344} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.320198] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d7312a4-9582-48df-bf50-43069609f258 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Reconfigured VM instance instance-0000002a to detach disk 2001 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 789.320402] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d7312a4-9582-48df-bf50-43069609f258 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 789.320688] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9c04dc0a-1fd6-4dfb-ba18-7a9dc9d34e57 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.326633] env[69328]: DEBUG oslo_vmware.api [None req-4d7312a4-9582-48df-bf50-43069609f258 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for the task: (returnval){ [ 789.326633] env[69328]: value = "task-3273225" [ 789.326633] env[69328]: _type = "Task" [ 789.326633] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.334933] env[69328]: DEBUG oslo_vmware.api [None req-4d7312a4-9582-48df-bf50-43069609f258 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3273225, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.373073] env[69328]: DEBUG oslo_concurrency.lockutils [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "refresh_cache-d10bee67-6294-4537-9ce7-4eedb8361ddc" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.373254] env[69328]: DEBUG oslo_concurrency.lockutils [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquired lock "refresh_cache-d10bee67-6294-4537-9ce7-4eedb8361ddc" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 789.373394] env[69328]: DEBUG nova.network.neutron [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 789.524480] env[69328]: DEBUG oslo_vmware.api [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': task-3273224, 'name': ReconfigVM_Task, 'duration_secs': 0.291175} completed successfully. 
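
Annotation: the Acquiring/Acquired/Releasing lines for locks such as "compute_resources" and "refresh_cache-<uuid>" are emitted by oslo.concurrency's lockutils wrapper, with the waited/held durations measuring time spent queuing for and holding the named lock. A minimal sketch of that named-lock pattern, assuming oslo.concurrency is installed; the function bodies below are illustrative only, not Nova's code.

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    # Runs with the named semaphore held, serialising claims the way
    # ResourceTracker.instance_claim is serialised in the log above.
    print(f'claiming resources for {instance_uuid}')

def refresh_network_cache(instance_uuid):
    # The explicit context-manager form, matching the per-instance
    # "refresh_cache-<uuid>" locks above.
    with lockutils.lock(f'refresh_cache-{instance_uuid}'):
        print(f'refreshing network info cache for {instance_uuid}')

claim_resources('4d320c76-45bb-451c-8fbb-3dd2d64f56d5')
refresh_network_cache('d10bee67-6294-4537-9ce7-4eedb8361ddc')
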
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.524480] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Reconfigured VM instance instance-00000030 to attach disk [datastore1] 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3/3b4b6687-fb6d-4bb7-8604-20a3ba706ff3.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 789.524928] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=69328) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 789.525610] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-5a4ebc61-816a-4c7e-9b2a-64d2c554cddb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.534484] env[69328]: DEBUG oslo_vmware.api [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Waiting for the task: (returnval){ [ 789.534484] env[69328]: value = "task-3273226" [ 789.534484] env[69328]: _type = "Task" [ 789.534484] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.544168] env[69328]: DEBUG oslo_vmware.api [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': task-3273226, 'name': CreateVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.576392] env[69328]: DEBUG nova.compute.manager [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 789.806732] env[69328]: DEBUG nova.compute.manager [req-2fce4b7b-76fb-4bd2-a7f1-2c64c811a804 req-abe4a587-fa98-4c2f-92e0-2b600360522c service nova] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Received event network-changed-33b9c9a0-82a8-4195-bb5a-d8e6e911ddc5 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 789.806920] env[69328]: DEBUG nova.compute.manager [req-2fce4b7b-76fb-4bd2-a7f1-2c64c811a804 req-abe4a587-fa98-4c2f-92e0-2b600360522c service nova] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Refreshing instance network info cache due to event network-changed-33b9c9a0-82a8-4195-bb5a-d8e6e911ddc5. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 789.807315] env[69328]: DEBUG oslo_concurrency.lockutils [req-2fce4b7b-76fb-4bd2-a7f1-2c64c811a804 req-abe4a587-fa98-4c2f-92e0-2b600360522c service nova] Acquiring lock "refresh_cache-8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.807517] env[69328]: DEBUG oslo_concurrency.lockutils [req-2fce4b7b-76fb-4bd2-a7f1-2c64c811a804 req-abe4a587-fa98-4c2f-92e0-2b600360522c service nova] Acquired lock "refresh_cache-8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 789.807684] env[69328]: DEBUG nova.network.neutron [req-2fce4b7b-76fb-4bd2-a7f1-2c64c811a804 req-abe4a587-fa98-4c2f-92e0-2b600360522c service nova] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Refreshing network info cache for port 33b9c9a0-82a8-4195-bb5a-d8e6e911ddc5 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 789.840195] env[69328]: DEBUG oslo_vmware.api [None req-4d7312a4-9582-48df-bf50-43069609f258 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3273225, 'name': PowerOnVM_Task, 'duration_secs': 0.467107} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.840195] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d7312a4-9582-48df-bf50-43069609f258 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 789.840476] env[69328]: DEBUG nova.compute.manager [None req-4d7312a4-9582-48df-bf50-43069609f258 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 789.842176] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fe138af-b286-4c20-932d-7587db81fa0a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.922762] env[69328]: DEBUG nova.network.neutron [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 790.048663] env[69328]: DEBUG oslo_vmware.api [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': task-3273226, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.058405} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.048936] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=69328) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 790.049736] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-466211e4-d061-48e2-bce2-17b1066835a1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.079438] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3/ephemeral_0.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 790.084064] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ab5faee-d0b8-4b68-944e-1ca57f2a494a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.104947] env[69328]: DEBUG oslo_vmware.api [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Waiting for the task: (returnval){ [ 790.104947] env[69328]: value = "task-3273227" [ 790.104947] env[69328]: _type = "Task" [ 790.104947] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.111424] env[69328]: DEBUG oslo_concurrency.lockutils [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 790.117613] env[69328]: DEBUG oslo_vmware.api [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': task-3273227, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.166464] env[69328]: DEBUG nova.compute.manager [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Stashing vm_state: active {{(pid=69328) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 790.212867] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61833e23-756f-4e3b-8b9b-f886ea002649 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.219901] env[69328]: DEBUG nova.network.neutron [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Updating instance_info_cache with network_info: [{"id": "7e9163b1-a349-4287-bbfe-8147dc2e52dd", "address": "fa:16:3e:fe:39:34", "network": {"id": "e2ab6957-2c9d-4b95-91bd-5a62d9a284ba", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-967470666-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75d5853e3c724d02bacfa75173e38ab3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e9163b1-a3", "ovs_interfaceid": "7e9163b1-a349-4287-bbfe-8147dc2e52dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.227582] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caa7a8c2-381a-4701-a61c-2761f68f211b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.266976] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa0c534f-9c81-4736-8f74-19446ae4b703 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.276032] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae51997f-af7e-4a38-a4e8-c877452bfc22 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.292292] env[69328]: DEBUG nova.compute.provider_tree [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 
196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 790.473026] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a15a5191-3036-4059-83a7-dcdee25dcac7 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Acquiring lock "e92953f4-b634-4ef9-a5ad-63a886cfa007" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 790.473026] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a15a5191-3036-4059-83a7-dcdee25dcac7 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Lock "e92953f4-b634-4ef9-a5ad-63a886cfa007" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 790.473026] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a15a5191-3036-4059-83a7-dcdee25dcac7 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Acquiring lock "e92953f4-b634-4ef9-a5ad-63a886cfa007-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 790.473026] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a15a5191-3036-4059-83a7-dcdee25dcac7 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Lock "e92953f4-b634-4ef9-a5ad-63a886cfa007-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 790.473394] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a15a5191-3036-4059-83a7-dcdee25dcac7 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Lock "e92953f4-b634-4ef9-a5ad-63a886cfa007-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 790.474706] env[69328]: INFO nova.compute.manager [None req-a15a5191-3036-4059-83a7-dcdee25dcac7 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Terminating instance [ 790.615858] env[69328]: DEBUG oslo_vmware.api [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': task-3273227, 'name': ReconfigVM_Task, 'duration_secs': 0.479716} completed successfully. 
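
Annotation: the inventory payloads repeated throughout this excerpt all carry the same headline numbers; the capacity Placement derives from such a record is (total - reserved) * allocation_ratio per resource class, with min_unit/max_unit/step_size then constraining individual allocations. A quick worked check against the figures in the log (helper name is illustrative):

def effective_capacity(inventories):
    """Capacity per resource class: (total - reserved) * allocation_ratio."""
    return {rc: (rec['total'] - rec['reserved']) * rec['allocation_ratio']
            for rc, rec in inventories.items()}

inventories = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
}
print(effective_capacity(inventories))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
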
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.616163] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Reconfigured VM instance instance-00000030 to attach disk [datastore1] 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3/ephemeral_0.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 790.616855] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-095d3766-2ccb-4f86-b32b-3ddd680120e7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.626557] env[69328]: DEBUG oslo_vmware.api [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Waiting for the task: (returnval){ [ 790.626557] env[69328]: value = "task-3273228" [ 790.626557] env[69328]: _type = "Task" [ 790.626557] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.637876] env[69328]: DEBUG oslo_vmware.api [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': task-3273228, 'name': Rename_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.689512] env[69328]: DEBUG oslo_concurrency.lockutils [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 790.723916] env[69328]: DEBUG oslo_concurrency.lockutils [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Releasing lock "refresh_cache-d10bee67-6294-4537-9ce7-4eedb8361ddc" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 790.724366] env[69328]: DEBUG nova.compute.manager [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Instance network_info: |[{"id": "7e9163b1-a349-4287-bbfe-8147dc2e52dd", "address": "fa:16:3e:fe:39:34", "network": {"id": "e2ab6957-2c9d-4b95-91bd-5a62d9a284ba", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-967470666-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75d5853e3c724d02bacfa75173e38ab3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e9163b1-a3", "ovs_interfaceid": "7e9163b1-a349-4287-bbfe-8147dc2e52dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 790.724819] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:39:34', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'abcf0d10-3f3f-45dc-923e-1c78766e2dad', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7e9163b1-a349-4287-bbfe-8147dc2e52dd', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 790.733313] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 790.737809] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 790.738132] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-37263870-3592-4e5f-aee8-33214e594f81 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.760661] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 790.760661] env[69328]: value = "task-3273229" [ 790.760661] env[69328]: _type = "Task" [ 790.760661] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.769345] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273229, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.819679] env[69328]: ERROR nova.scheduler.client.report [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [req-81b1e654-e5e9-4002-8ee1-d77936206fcb] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-81b1e654-e5e9-4002-8ee1-d77936206fcb"}]} [ 790.845909] env[69328]: DEBUG nova.scheduler.client.report [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Refreshing inventories for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 790.863063] env[69328]: DEBUG nova.scheduler.client.report [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Updating ProviderTree inventory for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 790.863438] env[69328]: DEBUG nova.compute.provider_tree [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 790.877588] env[69328]: DEBUG nova.scheduler.client.report [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Refreshing aggregate associations for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e, aggregates: None {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 790.902679] env[69328]: DEBUG nova.scheduler.client.report [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Refreshing trait associations for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 790.935739] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a1b7b68d-5e62-40b9-8791-1ad9c1de04f2 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Acquiring lock "99e31dfd-5d41-4564-886f-becc25ca289c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 790.935997] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a1b7b68d-5e62-40b9-8791-1ad9c1de04f2 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Lock "99e31dfd-5d41-4564-886f-becc25ca289c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 790.936225] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a1b7b68d-5e62-40b9-8791-1ad9c1de04f2 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Acquiring lock "99e31dfd-5d41-4564-886f-becc25ca289c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 790.936415] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a1b7b68d-5e62-40b9-8791-1ad9c1de04f2 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Lock "99e31dfd-5d41-4564-886f-becc25ca289c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 790.936581] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a1b7b68d-5e62-40b9-8791-1ad9c1de04f2 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Lock "99e31dfd-5d41-4564-886f-becc25ca289c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 790.939743] env[69328]: INFO nova.compute.manager [None req-a1b7b68d-5e62-40b9-8791-1ad9c1de04f2 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Terminating instance [ 790.954807] env[69328]: DEBUG nova.network.neutron [req-2fce4b7b-76fb-4bd2-a7f1-2c64c811a804 req-abe4a587-fa98-4c2f-92e0-2b600360522c service nova] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Updated VIF entry in instance network info cache for port 33b9c9a0-82a8-4195-bb5a-d8e6e911ddc5. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 790.955191] env[69328]: DEBUG nova.network.neutron [req-2fce4b7b-76fb-4bd2-a7f1-2c64c811a804 req-abe4a587-fa98-4c2f-92e0-2b600360522c service nova] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Updating instance_info_cache with network_info: [{"id": "33b9c9a0-82a8-4195-bb5a-d8e6e911ddc5", "address": "fa:16:3e:55:c6:67", "network": {"id": "45671a9d-5017-4d83-b871-b0f453a85414", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-44603226-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "79faf009f74c4bb59df1cc4c6b0dadd4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33b9c9a0-82", "ovs_interfaceid": "33b9c9a0-82a8-4195-bb5a-d8e6e911ddc5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.983237] env[69328]: DEBUG nova.compute.manager [None req-a15a5191-3036-4059-83a7-dcdee25dcac7 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 790.983237] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a15a5191-3036-4059-83a7-dcdee25dcac7 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 790.983827] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94a528b2-ac13-459e-9ef7-16f88ba838f8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.996566] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a15a5191-3036-4059-83a7-dcdee25dcac7 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 790.996566] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4b8209f7-50f9-4e67-89d9-665014e1a408 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.004829] env[69328]: DEBUG oslo_vmware.api [None req-a15a5191-3036-4059-83a7-dcdee25dcac7 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Waiting for the task: (returnval){ [ 791.004829] env[69328]: value = "task-3273230" [ 791.004829] env[69328]: _type = "Task" [ 791.004829] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.014071] env[69328]: DEBUG oslo_vmware.api [None req-a15a5191-3036-4059-83a7-dcdee25dcac7 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': task-3273230, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.138457] env[69328]: DEBUG oslo_vmware.api [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': task-3273228, 'name': Rename_Task, 'duration_secs': 0.330098} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.138895] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 791.139913] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-61d12f9b-da79-4b4a-bd05-0b213c6a35ad {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.145465] env[69328]: DEBUG oslo_vmware.api [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Waiting for the task: (returnval){ [ 791.145465] env[69328]: value = "task-3273231" [ 791.145465] env[69328]: _type = "Task" [ 791.145465] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.155435] env[69328]: DEBUG oslo_vmware.api [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': task-3273231, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.274614] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273229, 'name': CreateVM_Task, 'duration_secs': 0.439701} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.277156] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 791.278329] env[69328]: DEBUG oslo_concurrency.lockutils [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.278531] env[69328]: DEBUG oslo_concurrency.lockutils [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 791.279552] env[69328]: DEBUG oslo_concurrency.lockutils [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 791.279552] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48dba8e6-0a0f-449d-8aa8-34cea06743b3 {{(pid=69328) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.286052] env[69328]: DEBUG oslo_vmware.api [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 791.286052] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52935e1e-fa57-f50d-fc68-3698faf5c161" [ 791.286052] env[69328]: _type = "Task" [ 791.286052] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.297623] env[69328]: DEBUG oslo_vmware.api [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52935e1e-fa57-f50d-fc68-3698faf5c161, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.448076] env[69328]: DEBUG nova.compute.manager [None req-a1b7b68d-5e62-40b9-8791-1ad9c1de04f2 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 791.448076] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a1b7b68d-5e62-40b9-8791-1ad9c1de04f2 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 791.448076] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f6d0e96-d487-4dee-b2ea-f9c153053253 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.458497] env[69328]: DEBUG oslo_concurrency.lockutils [req-2fce4b7b-76fb-4bd2-a7f1-2c64c811a804 req-abe4a587-fa98-4c2f-92e0-2b600360522c service nova] Releasing lock "refresh_cache-8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 791.458680] env[69328]: DEBUG nova.compute.manager [req-2fce4b7b-76fb-4bd2-a7f1-2c64c811a804 req-abe4a587-fa98-4c2f-92e0-2b600360522c service nova] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Received event network-vif-plugged-7e9163b1-a349-4287-bbfe-8147dc2e52dd {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 791.459468] env[69328]: DEBUG oslo_concurrency.lockutils [req-2fce4b7b-76fb-4bd2-a7f1-2c64c811a804 req-abe4a587-fa98-4c2f-92e0-2b600360522c service nova] Acquiring lock "d10bee67-6294-4537-9ce7-4eedb8361ddc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 791.459468] env[69328]: DEBUG oslo_concurrency.lockutils [req-2fce4b7b-76fb-4bd2-a7f1-2c64c811a804 req-abe4a587-fa98-4c2f-92e0-2b600360522c service nova] Lock "d10bee67-6294-4537-9ce7-4eedb8361ddc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 791.459468] 
env[69328]: DEBUG oslo_concurrency.lockutils [req-2fce4b7b-76fb-4bd2-a7f1-2c64c811a804 req-abe4a587-fa98-4c2f-92e0-2b600360522c service nova] Lock "d10bee67-6294-4537-9ce7-4eedb8361ddc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 791.459468] env[69328]: DEBUG nova.compute.manager [req-2fce4b7b-76fb-4bd2-a7f1-2c64c811a804 req-abe4a587-fa98-4c2f-92e0-2b600360522c service nova] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] No waiting events found dispatching network-vif-plugged-7e9163b1-a349-4287-bbfe-8147dc2e52dd {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 791.459699] env[69328]: WARNING nova.compute.manager [req-2fce4b7b-76fb-4bd2-a7f1-2c64c811a804 req-abe4a587-fa98-4c2f-92e0-2b600360522c service nova] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Received unexpected event network-vif-plugged-7e9163b1-a349-4287-bbfe-8147dc2e52dd for instance with vm_state building and task_state spawning. [ 791.459983] env[69328]: DEBUG nova.compute.manager [req-2fce4b7b-76fb-4bd2-a7f1-2c64c811a804 req-abe4a587-fa98-4c2f-92e0-2b600360522c service nova] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Received event network-changed-7e9163b1-a349-4287-bbfe-8147dc2e52dd {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 791.459983] env[69328]: DEBUG nova.compute.manager [req-2fce4b7b-76fb-4bd2-a7f1-2c64c811a804 req-abe4a587-fa98-4c2f-92e0-2b600360522c service nova] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Refreshing instance network info cache due to event network-changed-7e9163b1-a349-4287-bbfe-8147dc2e52dd. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 791.460221] env[69328]: DEBUG oslo_concurrency.lockutils [req-2fce4b7b-76fb-4bd2-a7f1-2c64c811a804 req-abe4a587-fa98-4c2f-92e0-2b600360522c service nova] Acquiring lock "refresh_cache-d10bee67-6294-4537-9ce7-4eedb8361ddc" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.460366] env[69328]: DEBUG oslo_concurrency.lockutils [req-2fce4b7b-76fb-4bd2-a7f1-2c64c811a804 req-abe4a587-fa98-4c2f-92e0-2b600360522c service nova] Acquired lock "refresh_cache-d10bee67-6294-4537-9ce7-4eedb8361ddc" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 791.460534] env[69328]: DEBUG nova.network.neutron [req-2fce4b7b-76fb-4bd2-a7f1-2c64c811a804 req-abe4a587-fa98-4c2f-92e0-2b600360522c service nova] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Refreshing network info cache for port 7e9163b1-a349-4287-bbfe-8147dc2e52dd {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 791.462559] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1b7b68d-5e62-40b9-8791-1ad9c1de04f2 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 791.466115] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fe2334c4-3f35-4dce-8fd0-00d58cdfe0f5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.475098] env[69328]: DEBUG oslo_vmware.api [None 
req-a1b7b68d-5e62-40b9-8791-1ad9c1de04f2 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for the task: (returnval){ [ 791.475098] env[69328]: value = "task-3273232" [ 791.475098] env[69328]: _type = "Task" [ 791.475098] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.477059] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87e7c817-a7c7-4f81-970a-368e7caf2616 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.491047] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-048c7287-a5e4-4fb9-8a23-93282eed1886 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.495131] env[69328]: DEBUG oslo_vmware.api [None req-a1b7b68d-5e62-40b9-8791-1ad9c1de04f2 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3273232, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.527342] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d39fe40-d8f9-41a3-89f0-785c55babb25 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.536193] env[69328]: DEBUG oslo_vmware.api [None req-a15a5191-3036-4059-83a7-dcdee25dcac7 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': task-3273230, 'name': PowerOffVM_Task, 'duration_secs': 0.242633} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.540699] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a15a5191-3036-4059-83a7-dcdee25dcac7 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 791.540699] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a15a5191-3036-4059-83a7-dcdee25dcac7 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 791.540699] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4a949bef-d7ae-47da-8ae3-05f64d1e3951 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.541526] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20a70839-50a9-4ce8-a8c6-488137a548ad {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.556770] env[69328]: DEBUG nova.compute.provider_tree [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 791.615706] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a15a5191-3036-4059-83a7-dcdee25dcac7 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 791.616313] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a15a5191-3036-4059-83a7-dcdee25dcac7 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 791.616313] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-a15a5191-3036-4059-83a7-dcdee25dcac7 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Deleting the datastore file [datastore1] e92953f4-b634-4ef9-a5ad-63a886cfa007 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 791.616909] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dfb08587-649a-4759-8a9d-cf811cf003bf {{(pid=69328) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.623179] env[69328]: DEBUG oslo_vmware.api [None req-a15a5191-3036-4059-83a7-dcdee25dcac7 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Waiting for the task: (returnval){ [ 791.623179] env[69328]: value = "task-3273234" [ 791.623179] env[69328]: _type = "Task" [ 791.623179] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.634774] env[69328]: DEBUG oslo_vmware.api [None req-a15a5191-3036-4059-83a7-dcdee25dcac7 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': task-3273234, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.657527] env[69328]: DEBUG oslo_vmware.api [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': task-3273231, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.799166] env[69328]: DEBUG oslo_vmware.api [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52935e1e-fa57-f50d-fc68-3698faf5c161, 'name': SearchDatastore_Task, 'duration_secs': 0.012117} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.799570] env[69328]: DEBUG oslo_concurrency.lockutils [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 791.799886] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 791.800221] env[69328]: DEBUG oslo_concurrency.lockutils [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.800417] env[69328]: DEBUG oslo_concurrency.lockutils [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 791.800680] env[69328]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 791.802314] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d8c486cd-a740-40fc-b364-793e71e58e7c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.812909] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 791.813133] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 791.814010] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ca5252e-b9b0-4c04-abf8-2cc6a183e8cb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.819947] env[69328]: DEBUG oslo_vmware.api [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 791.819947] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52764df8-05e4-99a4-2e6f-a0ed7a34758a" [ 791.819947] env[69328]: _type = "Task" [ 791.819947] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.829099] env[69328]: DEBUG oslo_vmware.api [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52764df8-05e4-99a4-2e6f-a0ed7a34758a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.989444] env[69328]: DEBUG oslo_vmware.api [None req-a1b7b68d-5e62-40b9-8791-1ad9c1de04f2 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3273232, 'name': PowerOffVM_Task, 'duration_secs': 0.213823} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.989785] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1b7b68d-5e62-40b9-8791-1ad9c1de04f2 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 791.990061] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a1b7b68d-5e62-40b9-8791-1ad9c1de04f2 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 791.990392] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d8633923-d67f-4851-8676-e04db0004887 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.083450] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a1b7b68d-5e62-40b9-8791-1ad9c1de04f2 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 792.083450] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a1b7b68d-5e62-40b9-8791-1ad9c1de04f2 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 792.083450] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1b7b68d-5e62-40b9-8791-1ad9c1de04f2 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Deleting the datastore file [datastore2] 99e31dfd-5d41-4564-886f-becc25ca289c {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 792.083596] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9249abc5-dbe5-4915-bd4b-db5fddb96d0c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.091068] env[69328]: DEBUG oslo_vmware.api [None req-a1b7b68d-5e62-40b9-8791-1ad9c1de04f2 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for the task: (returnval){ [ 792.091068] env[69328]: value = "task-3273236" [ 792.091068] env[69328]: _type = "Task" [ 792.091068] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.097372] env[69328]: DEBUG nova.scheduler.client.report [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Updated inventory for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with generation 77 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 792.097372] env[69328]: DEBUG nova.compute.provider_tree [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Updating resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e generation from 77 to 78 during operation: update_inventory {{(pid=69328) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 792.097372] env[69328]: DEBUG nova.compute.provider_tree [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 792.104268] env[69328]: DEBUG oslo_vmware.api [None req-a1b7b68d-5e62-40b9-8791-1ad9c1de04f2 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3273236, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.134266] env[69328]: DEBUG oslo_vmware.api [None req-a15a5191-3036-4059-83a7-dcdee25dcac7 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Task: {'id': task-3273234, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.48693} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.134536] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-a15a5191-3036-4059-83a7-dcdee25dcac7 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 792.134721] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a15a5191-3036-4059-83a7-dcdee25dcac7 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 792.135111] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a15a5191-3036-4059-83a7-dcdee25dcac7 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 792.135111] env[69328]: INFO nova.compute.manager [None req-a15a5191-3036-4059-83a7-dcdee25dcac7 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Took 1.15 seconds to destroy the instance on the hypervisor. [ 792.135322] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a15a5191-3036-4059-83a7-dcdee25dcac7 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 792.135514] env[69328]: DEBUG nova.compute.manager [-] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 792.136475] env[69328]: DEBUG nova.network.neutron [-] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 792.160596] env[69328]: DEBUG oslo_vmware.api [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': task-3273231, 'name': PowerOnVM_Task, 'duration_secs': 0.714678} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.160901] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 792.161300] env[69328]: INFO nova.compute.manager [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Took 9.09 seconds to spawn the instance on the hypervisor. [ 792.161547] env[69328]: DEBUG nova.compute.manager [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 792.162688] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f529d20b-443d-47e0-85d1-052f75107183 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.255266] env[69328]: DEBUG nova.network.neutron [req-2fce4b7b-76fb-4bd2-a7f1-2c64c811a804 req-abe4a587-fa98-4c2f-92e0-2b600360522c service nova] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Updated VIF entry in instance network info cache for port 7e9163b1-a349-4287-bbfe-8147dc2e52dd. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 792.255690] env[69328]: DEBUG nova.network.neutron [req-2fce4b7b-76fb-4bd2-a7f1-2c64c811a804 req-abe4a587-fa98-4c2f-92e0-2b600360522c service nova] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Updating instance_info_cache with network_info: [{"id": "7e9163b1-a349-4287-bbfe-8147dc2e52dd", "address": "fa:16:3e:fe:39:34", "network": {"id": "e2ab6957-2c9d-4b95-91bd-5a62d9a284ba", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-967470666-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75d5853e3c724d02bacfa75173e38ab3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e9163b1-a3", "ovs_interfaceid": "7e9163b1-a349-4287-bbfe-8147dc2e52dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 792.334562] env[69328]: DEBUG oslo_vmware.api [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] 
Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52764df8-05e4-99a4-2e6f-a0ed7a34758a, 'name': SearchDatastore_Task, 'duration_secs': 0.016557} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.335549] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d292ce87-7741-46c5-899c-8cc234937a6d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.341500] env[69328]: DEBUG oslo_vmware.api [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 792.341500] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52fed27f-38bc-db1b-b086-fa8aba8e78eb" [ 792.341500] env[69328]: _type = "Task" [ 792.341500] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.350849] env[69328]: DEBUG oslo_vmware.api [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52fed27f-38bc-db1b-b086-fa8aba8e78eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.603801] env[69328]: DEBUG oslo_vmware.api [None req-a1b7b68d-5e62-40b9-8791-1ad9c1de04f2 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3273236, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.405305} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.604115] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1b7b68d-5e62-40b9-8791-1ad9c1de04f2 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 792.604314] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a1b7b68d-5e62-40b9-8791-1ad9c1de04f2 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 792.606965] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a1b7b68d-5e62-40b9-8791-1ad9c1de04f2 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 792.606965] env[69328]: INFO nova.compute.manager [None req-a1b7b68d-5e62-40b9-8791-1ad9c1de04f2 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Took 1.16 seconds to destroy the instance on the hypervisor. 
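The failed inventory PUT for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e above (409 with code placement.concurrent_update), followed by the inventory/aggregate/trait refresh and the later successful update that moves the provider generation from 77 to 78, is Placement's generation-based optimistic concurrency at work: every write must echo the provider generation the writer last saw, and a conflict means another writer bumped it first, so the client re-reads and retries. The snippet below is only an illustrative sketch of that retry loop against the Placement REST API, not Nova's scheduler report client; the service URL, token and microversion header are assumptions.

# Illustrative sketch of the generation-conflict retry seen in the log above.
# Not nova's report client; PLACEMENT/HEADERS are assumed placeholders.
import requests

PLACEMENT = "http://placement.example.test/placement"      # assumed endpoint
HEADERS = {"X-Auth-Token": "ADMIN_TOKEN",                   # assumed credentials
           "OpenStack-API-Version": "placement 1.26"}       # assumed microversion

def put_inventories_with_retry(provider_uuid, inventories, attempts=3):
    """PUT the full inventory set, re-reading the provider generation on 409."""
    url = f"{PLACEMENT}/resource_providers/{provider_uuid}/inventories"
    for _ in range(attempts):
        # Read current inventories to learn the generation we must echo back.
        current = requests.get(url, headers=HEADERS)
        current.raise_for_status()
        generation = current.json()["resource_provider_generation"]

        body = {"resource_provider_generation": generation,
                "inventories": inventories}
        resp = requests.put(url, json=body, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()   # success; response carries the new generation
        # 409 placement.concurrent_update: another writer bumped the generation
        # between our GET and PUT, so refresh and retry, as the log shows above.
    raise RuntimeError("gave up after repeated generation conflicts")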
[ 792.606965] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a1b7b68d-5e62-40b9-8791-1ad9c1de04f2 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 792.606965] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.893s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 792.606965] env[69328]: DEBUG nova.compute.manager [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 792.609080] env[69328]: DEBUG nova.compute.manager [-] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 792.609237] env[69328]: DEBUG nova.network.neutron [-] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 792.611052] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.729s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 792.612136] env[69328]: DEBUG nova.objects.instance [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lazy-loading 'resources' on Instance uuid c465c53f-d96b-461b-b8ff-b19929b4f789 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 792.622167] env[69328]: DEBUG nova.compute.manager [req-da6c4844-1af3-452b-93b9-5d87b4991c39 req-942228b0-2f85-4da0-8e19-209edf860de1 service nova] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Received event network-vif-deleted-efc73e76-7767-42a3-b5a4-3891364b487f {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 792.622167] env[69328]: INFO nova.compute.manager [req-da6c4844-1af3-452b-93b9-5d87b4991c39 req-942228b0-2f85-4da0-8e19-209edf860de1 service nova] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Neutron deleted interface efc73e76-7767-42a3-b5a4-3891364b487f; detaching it from the instance and deleting it from the info cache [ 792.622167] env[69328]: DEBUG nova.network.neutron [req-da6c4844-1af3-452b-93b9-5d87b4991c39 req-942228b0-2f85-4da0-8e19-209edf860de1 service nova] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 792.685670] env[69328]: 
INFO nova.compute.manager [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Took 42.98 seconds to build instance. [ 792.758475] env[69328]: DEBUG oslo_concurrency.lockutils [req-2fce4b7b-76fb-4bd2-a7f1-2c64c811a804 req-abe4a587-fa98-4c2f-92e0-2b600360522c service nova] Releasing lock "refresh_cache-d10bee67-6294-4537-9ce7-4eedb8361ddc" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 792.852712] env[69328]: DEBUG oslo_vmware.api [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52fed27f-38bc-db1b-b086-fa8aba8e78eb, 'name': SearchDatastore_Task, 'duration_secs': 0.03654} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.852982] env[69328]: DEBUG oslo_concurrency.lockutils [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 792.854638] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] d10bee67-6294-4537-9ce7-4eedb8361ddc/d10bee67-6294-4537-9ce7-4eedb8361ddc.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 792.854945] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c84b4470-2572-4614-a2c4-48a19b62082f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.863380] env[69328]: DEBUG oslo_vmware.api [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 792.863380] env[69328]: value = "task-3273237" [ 792.863380] env[69328]: _type = "Task" [ 792.863380] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.873265] env[69328]: DEBUG oslo_vmware.api [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273237, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.004590] env[69328]: DEBUG nova.network.neutron [-] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.116441] env[69328]: DEBUG nova.compute.utils [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 793.121116] env[69328]: DEBUG nova.compute.manager [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 793.121922] env[69328]: DEBUG nova.network.neutron [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 793.124722] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e1ec4430-8f2c-465b-be8d-916ae6e75177 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.138676] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cefad2b-b7c0-4fa4-ad3f-4e1629ad0bbe {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.173917] env[69328]: DEBUG nova.compute.manager [req-da6c4844-1af3-452b-93b9-5d87b4991c39 req-942228b0-2f85-4da0-8e19-209edf860de1 service nova] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Detach interface failed, port_id=efc73e76-7767-42a3-b5a4-3891364b487f, reason: Instance e92953f4-b634-4ef9-a5ad-63a886cfa007 could not be found. 
{{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 793.184282] env[69328]: DEBUG nova.policy [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '929ab12fcdb943a48039c7508e6a0b35', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '088bc9e3aeb449baa0a522342d57d183', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 793.191689] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e04dae18-7c57-496d-a5f6-7dac08ea7b00 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Lock "3b4b6687-fb6d-4bb7-8604-20a3ba706ff3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.986s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 793.377378] env[69328]: DEBUG oslo_vmware.api [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273237, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.496722] env[69328]: DEBUG nova.network.neutron [-] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.509230] env[69328]: INFO nova.compute.manager [-] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Took 1.37 seconds to deallocate network for instance. [ 793.622431] env[69328]: DEBUG nova.compute.manager [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 793.631038] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f341df56-5447-4353-937a-6a396b2b4f07 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.642336] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5134b095-515b-4501-be97-d902484142da {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.697187] env[69328]: DEBUG nova.compute.manager [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Starting instance... 
{{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 793.706120] env[69328]: DEBUG nova.network.neutron [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Successfully created port: f11b7e60-0d64-4eba-a305-c8a67f80d4b8 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 793.714191] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ca33365-c7e7-45ab-95d2-81b13d626c5e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.727087] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59e2cad5-0315-40da-b636-b8cc181f96e6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.748025] env[69328]: DEBUG nova.compute.provider_tree [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 793.874730] env[69328]: DEBUG oslo_vmware.api [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273237, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.82983} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.875088] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] d10bee67-6294-4537-9ce7-4eedb8361ddc/d10bee67-6294-4537-9ce7-4eedb8361ddc.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 793.875318] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 793.875592] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-93daa93d-ea23-46fd-9d62-57b8e09f8f60 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.887871] env[69328]: DEBUG oslo_vmware.api [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 793.887871] env[69328]: value = "task-3273238" [ 793.887871] env[69328]: _type = "Task" [ 793.887871] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.895955] env[69328]: DEBUG oslo_vmware.api [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273238, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.998995] env[69328]: INFO nova.compute.manager [-] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Took 1.39 seconds to deallocate network for instance. [ 794.021187] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a15a5191-3036-4059-83a7-dcdee25dcac7 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 794.157470] env[69328]: DEBUG nova.compute.manager [req-84a771a0-020b-4320-b90d-6a480342e14c req-b0ca9d72-fdad-4249-9622-0642cfe7b80e service nova] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Received event network-changed-4ffb2723-2cb7-4f04-8e1b-208a6329288e {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 794.157470] env[69328]: DEBUG nova.compute.manager [req-84a771a0-020b-4320-b90d-6a480342e14c req-b0ca9d72-fdad-4249-9622-0642cfe7b80e service nova] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Refreshing instance network info cache due to event network-changed-4ffb2723-2cb7-4f04-8e1b-208a6329288e. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 794.157470] env[69328]: DEBUG oslo_concurrency.lockutils [req-84a771a0-020b-4320-b90d-6a480342e14c req-b0ca9d72-fdad-4249-9622-0642cfe7b80e service nova] Acquiring lock "refresh_cache-3b4b6687-fb6d-4bb7-8604-20a3ba706ff3" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.157470] env[69328]: DEBUG oslo_concurrency.lockutils [req-84a771a0-020b-4320-b90d-6a480342e14c req-b0ca9d72-fdad-4249-9622-0642cfe7b80e service nova] Acquired lock "refresh_cache-3b4b6687-fb6d-4bb7-8604-20a3ba706ff3" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 794.157470] env[69328]: DEBUG nova.network.neutron [req-84a771a0-020b-4320-b90d-6a480342e14c req-b0ca9d72-fdad-4249-9622-0642cfe7b80e service nova] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Refreshing network info cache for port 4ffb2723-2cb7-4f04-8e1b-208a6329288e {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 794.239456] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 794.251614] env[69328]: DEBUG nova.scheduler.client.report [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 794.395382] env[69328]: DEBUG oslo_vmware.api [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273238, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073003} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.395689] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 794.396574] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-960ad006-ed0d-46cb-a592-56024e485d79 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.421472] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Reconfiguring VM instance instance-00000031 to attach disk [datastore1] d10bee67-6294-4537-9ce7-4eedb8361ddc/d10bee67-6294-4537-9ce7-4eedb8361ddc.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 794.422229] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a1fd2571-88be-4a6b-a104-6f42157f2d18 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.444405] env[69328]: DEBUG oslo_vmware.api [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 794.444405] env[69328]: value = "task-3273239" [ 794.444405] env[69328]: _type = "Task" [ 794.444405] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.454181] env[69328]: DEBUG oslo_vmware.api [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273239, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.505489] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a1b7b68d-5e62-40b9-8791-1ad9c1de04f2 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 794.632830] env[69328]: DEBUG nova.compute.manager [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 794.659143] env[69328]: DEBUG nova.virt.hardware [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 794.659472] env[69328]: DEBUG nova.virt.hardware [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 794.659690] env[69328]: DEBUG nova.virt.hardware [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 794.659856] env[69328]: DEBUG nova.virt.hardware [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 794.660035] env[69328]: DEBUG nova.virt.hardware [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 794.660202] env[69328]: DEBUG nova.virt.hardware [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 794.660427] env[69328]: DEBUG nova.virt.hardware [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 794.660637] env[69328]: DEBUG nova.virt.hardware [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 794.660778] env[69328]: DEBUG nova.virt.hardware [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 794.660963] env[69328]: DEBUG nova.virt.hardware [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 794.661292] env[69328]: DEBUG nova.virt.hardware [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 794.664845] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-605193ac-36ac-4685-a39a-25a876b20b72 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.673530] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14e18250-5623-42fc-9860-09c7a912bdf9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.704329] env[69328]: DEBUG nova.compute.manager [req-5a4e523d-2f62-4fa8-8e13-ad83f5f7b3de req-ee8f7a8e-6d21-4417-8fc8-5feec752dc20 service nova] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Received event network-vif-deleted-90f7115d-cbd5-42dd-a07a-5eb45deb5276 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 794.757074] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.146s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 794.763617] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.801s {{(pid=69328) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 794.766012] env[69328]: INFO nova.compute.claims [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 794.788392] env[69328]: INFO nova.scheduler.client.report [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Deleted allocations for instance c465c53f-d96b-461b-b8ff-b19929b4f789 [ 794.953252] env[69328]: DEBUG nova.network.neutron [req-84a771a0-020b-4320-b90d-6a480342e14c req-b0ca9d72-fdad-4249-9622-0642cfe7b80e service nova] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Updated VIF entry in instance network info cache for port 4ffb2723-2cb7-4f04-8e1b-208a6329288e. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 794.953704] env[69328]: DEBUG nova.network.neutron [req-84a771a0-020b-4320-b90d-6a480342e14c req-b0ca9d72-fdad-4249-9622-0642cfe7b80e service nova] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Updating instance_info_cache with network_info: [{"id": "4ffb2723-2cb7-4f04-8e1b-208a6329288e", "address": "fa:16:3e:ad:66:bb", "network": {"id": "023046e8-e113-4ce9-95d7-1c04fc034ba6", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-89845670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.162", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1467d48a61f7410b8f6d5a981d169563", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ffb2723-2c", "ovs_interfaceid": "4ffb2723-2cb7-4f04-8e1b-208a6329288e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 794.958136] env[69328]: DEBUG oslo_vmware.api [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273239, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.299444] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c002fcab-f554-4c43-ac90-8d8f68bdc43a tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "c465c53f-d96b-461b-b8ff-b19929b4f789" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.705s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 795.457531] env[69328]: DEBUG oslo_vmware.api [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273239, 'name': ReconfigVM_Task, 'duration_secs': 0.836317} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.457897] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Reconfigured VM instance instance-00000031 to attach disk [datastore1] d10bee67-6294-4537-9ce7-4eedb8361ddc/d10bee67-6294-4537-9ce7-4eedb8361ddc.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 795.458867] env[69328]: DEBUG oslo_concurrency.lockutils [req-84a771a0-020b-4320-b90d-6a480342e14c req-b0ca9d72-fdad-4249-9622-0642cfe7b80e service nova] Releasing lock "refresh_cache-3b4b6687-fb6d-4bb7-8604-20a3ba706ff3" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 795.459331] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fb5d4777-3e11-4551-b1f4-2a7cdcb0f951 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.468095] env[69328]: DEBUG oslo_vmware.api [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 795.468095] env[69328]: value = "task-3273240" [ 795.468095] env[69328]: _type = "Task" [ 795.468095] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.473199] env[69328]: DEBUG nova.network.neutron [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Successfully updated port: f11b7e60-0d64-4eba-a305-c8a67f80d4b8 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 795.479730] env[69328]: DEBUG oslo_vmware.api [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273240, 'name': Rename_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.981038] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "refresh_cache-4d320c76-45bb-451c-8fbb-3dd2d64f56d5" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.981236] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquired lock "refresh_cache-4d320c76-45bb-451c-8fbb-3dd2d64f56d5" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 795.981398] env[69328]: DEBUG nova.network.neutron [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 795.982666] env[69328]: DEBUG oslo_vmware.api [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273240, 'name': Rename_Task, 'duration_secs': 0.274628} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.983376] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 795.983376] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bc59f3d6-05f9-4ea1-bf6f-18dcead98117 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.994999] env[69328]: DEBUG oslo_vmware.api [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 795.994999] env[69328]: value = "task-3273241" [ 795.994999] env[69328]: _type = "Task" [ 795.994999] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.007011] env[69328]: DEBUG oslo_vmware.api [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273241, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.159733] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aabb07f4-e5a6-4776-b48f-b02797feff0e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.169635] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1632b168-53d8-433c-9eb0-0888a82560c9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.218376] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6e811a2-4a07-4155-9900-012c92bc1caf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.229260] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35829cbe-7e63-4c77-b7b9-4083b73f09c7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.249795] env[69328]: DEBUG nova.compute.provider_tree [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 796.510833] env[69328]: DEBUG oslo_vmware.api [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273241, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.544884] env[69328]: DEBUG nova.network.neutron [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 796.754880] env[69328]: DEBUG nova.scheduler.client.report [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 796.773952] env[69328]: DEBUG nova.compute.manager [req-277197b0-99d4-4895-89b6-c4deea7b0c05 req-73a492b5-5120-42cf-98e8-abb1277503c0 service nova] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Received event network-vif-plugged-f11b7e60-0d64-4eba-a305-c8a67f80d4b8 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 796.773952] env[69328]: DEBUG oslo_concurrency.lockutils [req-277197b0-99d4-4895-89b6-c4deea7b0c05 req-73a492b5-5120-42cf-98e8-abb1277503c0 service nova] Acquiring lock "4d320c76-45bb-451c-8fbb-3dd2d64f56d5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 796.773952] env[69328]: DEBUG oslo_concurrency.lockutils [req-277197b0-99d4-4895-89b6-c4deea7b0c05 req-73a492b5-5120-42cf-98e8-abb1277503c0 service nova] Lock "4d320c76-45bb-451c-8fbb-3dd2d64f56d5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 796.773952] env[69328]: DEBUG oslo_concurrency.lockutils [req-277197b0-99d4-4895-89b6-c4deea7b0c05 req-73a492b5-5120-42cf-98e8-abb1277503c0 service nova] Lock "4d320c76-45bb-451c-8fbb-3dd2d64f56d5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 796.773952] env[69328]: DEBUG nova.compute.manager [req-277197b0-99d4-4895-89b6-c4deea7b0c05 req-73a492b5-5120-42cf-98e8-abb1277503c0 service nova] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] No waiting events found dispatching network-vif-plugged-f11b7e60-0d64-4eba-a305-c8a67f80d4b8 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 796.774282] env[69328]: WARNING nova.compute.manager [req-277197b0-99d4-4895-89b6-c4deea7b0c05 req-73a492b5-5120-42cf-98e8-abb1277503c0 service nova] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Received unexpected event network-vif-plugged-f11b7e60-0d64-4eba-a305-c8a67f80d4b8 for instance with vm_state building and task_state spawning. 
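The lock tracing that dominates this part of the log ("Acquiring lock ...", "acquired ... :: waited Ns", '"released" ... :: held Ns', emitted from lockutils.py's inner and lock helpers) is standard oslo.concurrency behaviour rather than anything Nova-specific. A minimal sketch of the two forms visible here follows; the function name is illustrative, while the lock names are taken from the log itself.

from oslo_concurrency import lockutils

# Decorator form: lockutils.synchronized() wraps the callable in the "inner" wrapper
# seen in the log, which records how long the caller waited for the lock and how long
# it was held (the ":: waited N.NNNs" / ":: held N.NNNs" DEBUG lines).
@lockutils.synchronized("compute_resources")
def claim_resources():
    pass  # illustrative body; runs only while "compute_resources" is held

# Context-manager form: lockutils.lock() logs the Acquiring/Acquired/Releasing pattern
# seen for the per-instance "refresh_cache-<uuid>" and "<uuid>-events" locks above.
with lockutils.lock("refresh_cache-4d320c76-45bb-451c-8fbb-3dd2d64f56d5"):
    pass
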
[ 796.778166] env[69328]: DEBUG nova.compute.manager [req-277197b0-99d4-4895-89b6-c4deea7b0c05 req-73a492b5-5120-42cf-98e8-abb1277503c0 service nova] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Received event network-changed-f11b7e60-0d64-4eba-a305-c8a67f80d4b8 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 796.778166] env[69328]: DEBUG nova.compute.manager [req-277197b0-99d4-4895-89b6-c4deea7b0c05 req-73a492b5-5120-42cf-98e8-abb1277503c0 service nova] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Refreshing instance network info cache due to event network-changed-f11b7e60-0d64-4eba-a305-c8a67f80d4b8. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 796.778166] env[69328]: DEBUG oslo_concurrency.lockutils [req-277197b0-99d4-4895-89b6-c4deea7b0c05 req-73a492b5-5120-42cf-98e8-abb1277503c0 service nova] Acquiring lock "refresh_cache-4d320c76-45bb-451c-8fbb-3dd2d64f56d5" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.842666] env[69328]: DEBUG nova.network.neutron [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Updating instance_info_cache with network_info: [{"id": "f11b7e60-0d64-4eba-a305-c8a67f80d4b8", "address": "fa:16:3e:8d:96:4e", "network": {"id": "2e8bdd1b-0cca-4f7c-bba3-ff1750073020", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2058593014-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "088bc9e3aeb449baa0a522342d57d183", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf11b7e60-0d", "ovs_interfaceid": "f11b7e60-0d64-4eba-a305-c8a67f80d4b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.005905] env[69328]: DEBUG oslo_vmware.api [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273241, 'name': PowerOnVM_Task, 'duration_secs': 0.665182} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.006265] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 797.006638] env[69328]: INFO nova.compute.manager [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Took 9.66 seconds to spawn the instance on the hypervisor. [ 797.006834] env[69328]: DEBUG nova.compute.manager [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 797.007677] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0341eb0b-22e0-42a1-a7ea-d3c64068ed9b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.264070] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.501s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 797.264620] env[69328]: DEBUG nova.compute.manager [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 797.267282] env[69328]: DEBUG oslo_concurrency.lockutils [None req-dff6540d-b8fa-4825-b027-b626caa9df94 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.139s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 797.271016] env[69328]: DEBUG nova.objects.instance [None req-dff6540d-b8fa-4825-b027-b626caa9df94 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Lazy-loading 'resources' on Instance uuid d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 797.345809] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Releasing lock "refresh_cache-4d320c76-45bb-451c-8fbb-3dd2d64f56d5" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 797.346161] env[69328]: DEBUG nova.compute.manager [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Instance network_info: |[{"id": "f11b7e60-0d64-4eba-a305-c8a67f80d4b8", "address": "fa:16:3e:8d:96:4e", "network": {"id": "2e8bdd1b-0cca-4f7c-bba3-ff1750073020", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2058593014-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "088bc9e3aeb449baa0a522342d57d183", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf11b7e60-0d", "ovs_interfaceid": "f11b7e60-0d64-4eba-a305-c8a67f80d4b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 797.346495] env[69328]: DEBUG oslo_concurrency.lockutils [req-277197b0-99d4-4895-89b6-c4deea7b0c05 req-73a492b5-5120-42cf-98e8-abb1277503c0 service nova] Acquired lock "refresh_cache-4d320c76-45bb-451c-8fbb-3dd2d64f56d5" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 797.346680] env[69328]: DEBUG nova.network.neutron [req-277197b0-99d4-4895-89b6-c4deea7b0c05 req-73a492b5-5120-42cf-98e8-abb1277503c0 service nova] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Refreshing network info cache for port f11b7e60-0d64-4eba-a305-c8a67f80d4b8 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 797.347843] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None 
req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8d:96:4e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '43ad01d2-c7dd-453c-a929-8ad76294d13c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f11b7e60-0d64-4eba-a305-c8a67f80d4b8', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 797.356889] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 797.356889] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 797.356889] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c6474647-ef50-4f00-bd3c-b65f697e2720 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.382186] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 797.382186] env[69328]: value = "task-3273242" [ 797.382186] env[69328]: _type = "Task" [ 797.382186] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.396362] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273242, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.526264] env[69328]: INFO nova.compute.manager [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Took 40.48 seconds to build instance. 
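The CreateVM_Task invocation just above (and the earlier CopyVirtualDisk_Task, ExtendVirtualDisk_Task and PowerOnVM_Task sequences) all follow the same oslo.vmware pattern: invoke a vSphere *_Task method through the API session, then block on the returned task, which is what produces the "Waiting for the task" and "progress is N%" poll lines. A minimal sketch, assuming an already-created oslo_vmware.api.VMwareAPISession; the helper name and the folder/config/pool references are placeholders, not Nova's actual spawn code.

def create_vm(session, folder_ref, config_spec, respool_ref):
    # Kick off Folder.CreateVM_Task on the vCenter side; returns a task moref.
    task = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                              config=config_spec, pool=respool_ref)
    # wait_for_task() polls the task until completion (the "progress is N%" lines)
    # and raises an oslo.vmware exception if the task ends in error.
    task_info = session.wait_for_task(task)
    return task_info.result  # managed-object reference of the new VirtualMachine
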
[ 797.669641] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "6ccd0715-0903-4fed-bf80-240f386e4ad8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 797.670425] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "6ccd0715-0903-4fed-bf80-240f386e4ad8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 797.770575] env[69328]: DEBUG nova.compute.utils [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 797.772340] env[69328]: DEBUG nova.compute.manager [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 797.772530] env[69328]: DEBUG nova.network.neutron [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 797.896940] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273242, 'name': CreateVM_Task, 'duration_secs': 0.37029} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.898433] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 797.899410] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.899410] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 797.899555] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 797.899797] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07d75c67-0daa-42af-80b7-3a58dc17ac45 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.905114] env[69328]: DEBUG nova.policy [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '435c64c503c043a29f90396ad3b070d9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '87581f423dc64e4fb9fe1d51ebc68597', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 797.916818] env[69328]: DEBUG oslo_vmware.api [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 797.916818] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]522d68dc-98f2-df25-057e-de227fd22def" [ 797.916818] env[69328]: _type = "Task" [ 797.916818] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.932862] env[69328]: DEBUG oslo_vmware.api [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]522d68dc-98f2-df25-057e-de227fd22def, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.030521] env[69328]: DEBUG oslo_concurrency.lockutils [None req-faeadd06-6a53-4a2a-b4c4-4713794bc756 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "d10bee67-6294-4537-9ce7-4eedb8361ddc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.856s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 798.228102] env[69328]: DEBUG nova.network.neutron [req-277197b0-99d4-4895-89b6-c4deea7b0c05 req-73a492b5-5120-42cf-98e8-abb1277503c0 service nova] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Updated VIF entry in instance network info cache for port f11b7e60-0d64-4eba-a305-c8a67f80d4b8. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 798.228767] env[69328]: DEBUG nova.network.neutron [req-277197b0-99d4-4895-89b6-c4deea7b0c05 req-73a492b5-5120-42cf-98e8-abb1277503c0 service nova] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Updating instance_info_cache with network_info: [{"id": "f11b7e60-0d64-4eba-a305-c8a67f80d4b8", "address": "fa:16:3e:8d:96:4e", "network": {"id": "2e8bdd1b-0cca-4f7c-bba3-ff1750073020", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2058593014-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "088bc9e3aeb449baa0a522342d57d183", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf11b7e60-0d", "ovs_interfaceid": "f11b7e60-0d64-4eba-a305-c8a67f80d4b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.278682] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d5b5418-f10d-4b5c-aab2-ddb55670a577 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.282841] env[69328]: DEBUG nova.compute.manager [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Start building block device mappings for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 798.291894] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a5ea209-118f-4688-a2a9-85bf85cbd122 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.330583] env[69328]: DEBUG nova.network.neutron [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Successfully created port: 87b2e37a-d778-4bd1-a107-6132378b5f4c {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 798.333150] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57562cf3-e0df-41b2-aadd-cb76bab7eb20 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.343689] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fafceef0-a001-4387-9504-fdbf82c69e0b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.358602] env[69328]: DEBUG nova.compute.provider_tree [None req-dff6540d-b8fa-4825-b027-b626caa9df94 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 798.426977] env[69328]: DEBUG oslo_vmware.api [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]522d68dc-98f2-df25-057e-de227fd22def, 'name': SearchDatastore_Task, 'duration_secs': 0.013455} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.427482] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 798.428021] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 798.428565] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 798.428565] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 798.428822] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 798.429187] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-42cdbd4d-473b-48d4-b3db-61acb0d22f66 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.438598] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 798.438923] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Folder [datastore2] devstack-image-cache_base created. 
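Note on the image-cache handling above: the build serializes on a lock named after the cached image's datastore path before touching devstack-image-cache_base. A minimal sketch of that pattern with oslo.concurrency (the function name prepare_cached_image is made up for illustration; this is not Nova's actual code):

    # Illustrative sketch, not Nova's implementation: serialize work on a shared
    # image-cache entry by locking on its datastore path, as the records above do.
    from oslo_concurrency import lockutils

    def prepare_cached_image(datastore: str, image_id: str) -> str:
        cache_vmdk = (f"[{datastore}] devstack-image-cache_base/"
                      f"{image_id}/{image_id}.vmdk")
        # Concurrent builds that need the same cached image queue on this lock name
        # instead of racing on the cache directory.
        with lockutils.lock(cache_vmdk):
            # The real code creates the cache folder and copies/fetches the VMDK here.
            print(f"holding cache lock for {cache_vmdk}")
        return cache_vmdk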
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 798.439673] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b06ee4bc-3d8c-41e5-b1ea-a4e37d52277c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.446747] env[69328]: DEBUG oslo_vmware.api [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 798.446747] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b27cc1-eb0b-b3d0-8e31-0be97be46295" [ 798.446747] env[69328]: _type = "Task" [ 798.446747] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.458862] env[69328]: DEBUG oslo_vmware.api [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b27cc1-eb0b-b3d0-8e31-0be97be46295, 'name': SearchDatastore_Task, 'duration_secs': 0.009665} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.459962] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-573acfb3-30be-4873-ba7a-224309172032 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.467116] env[69328]: DEBUG oslo_vmware.api [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 798.467116] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5214748c-34ca-fa6a-85cf-5db6d15fbdd0" [ 798.467116] env[69328]: _type = "Task" [ 798.467116] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.476805] env[69328]: DEBUG oslo_vmware.api [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5214748c-34ca-fa6a-85cf-5db6d15fbdd0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.537273] env[69328]: DEBUG nova.compute.manager [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Starting instance... 
{{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 798.736338] env[69328]: DEBUG oslo_concurrency.lockutils [req-277197b0-99d4-4895-89b6-c4deea7b0c05 req-73a492b5-5120-42cf-98e8-abb1277503c0 service nova] Releasing lock "refresh_cache-4d320c76-45bb-451c-8fbb-3dd2d64f56d5" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 798.864487] env[69328]: DEBUG nova.scheduler.client.report [None req-dff6540d-b8fa-4825-b027-b626caa9df94 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 798.979373] env[69328]: DEBUG oslo_vmware.api [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5214748c-34ca-fa6a-85cf-5db6d15fbdd0, 'name': SearchDatastore_Task, 'duration_secs': 0.010643} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.979637] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 798.979908] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 4d320c76-45bb-451c-8fbb-3dd2d64f56d5/4d320c76-45bb-451c-8fbb-3dd2d64f56d5.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 798.980254] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-11e461df-4be1-4351-8bd1-d0ea87853220 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.988390] env[69328]: DEBUG oslo_vmware.api [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 798.988390] env[69328]: value = "task-3273243" [ 798.988390] env[69328]: _type = "Task" [ 798.988390] env[69328]: } to complete. 
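The "Waiting for the task … progress is N% … completed successfully" triplets above come from polling a vCenter task until it reaches a terminal state. A generic sketch of that poll loop; the Task attributes used here (state, progress, result, id) are assumptions for illustration, not the oslo.vmware API:

    # Generic poll-until-done loop in the spirit of the wait_for_task records above.
    import time

    def wait_for_task(task, poll_interval=0.5, timeout=300):
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            if task.state == 'success':        # assumed attribute
                return task.result             # assumed attribute
            if task.state == 'error':
                raise RuntimeError(f"task {task.id} failed")
            print(f"task {task.id} progress is {task.progress}%")
            time.sleep(poll_interval)
        raise TimeoutError(f"task {task.id} did not finish within {timeout}s")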
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.996847] env[69328]: DEBUG oslo_vmware.api [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273243, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.066480] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 799.293574] env[69328]: DEBUG nova.compute.manager [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 799.318191] env[69328]: DEBUG nova.virt.hardware [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 799.318502] env[69328]: DEBUG nova.virt.hardware [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 799.318609] env[69328]: DEBUG nova.virt.hardware [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 799.318841] env[69328]: DEBUG nova.virt.hardware [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 799.319058] env[69328]: DEBUG nova.virt.hardware [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 799.319238] env[69328]: DEBUG nova.virt.hardware [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 799.319852] env[69328]: DEBUG nova.virt.hardware [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 799.319852] env[69328]: DEBUG nova.virt.hardware [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 799.319852] env[69328]: DEBUG nova.virt.hardware [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 799.320093] env[69328]: DEBUG nova.virt.hardware [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 799.320093] env[69328]: DEBUG nova.virt.hardware [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 799.321068] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06efaf57-1135-430f-ba6d-e6dcc6479c4a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.330677] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-416eaa5b-ed30-4342-94c5-b91e8c7b35ee {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.371232] env[69328]: DEBUG oslo_concurrency.lockutils [None req-dff6540d-b8fa-4825-b027-b626caa9df94 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.104s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 799.373840] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cce60ede-7212-4a64-a3f5-72aa1be1edc1 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.277s {{(pid=69328) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 799.375043] env[69328]: DEBUG nova.objects.instance [None req-cce60ede-7212-4a64-a3f5-72aa1be1edc1 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Lazy-loading 'resources' on Instance uuid 5292b759-9d1f-486a-b4d6-90519b3ae986 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 799.397310] env[69328]: INFO nova.compute.manager [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Rebuilding instance [ 799.409191] env[69328]: INFO nova.scheduler.client.report [None req-dff6540d-b8fa-4825-b027-b626caa9df94 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Deleted allocations for instance d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2 [ 799.449589] env[69328]: DEBUG nova.compute.manager [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 799.450804] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63bbe788-28fb-4670-9688-effb022d80e4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.499213] env[69328]: DEBUG oslo_vmware.api [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273243, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.481516} completed successfully. 
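The CPU-topology records a few entries above ("Build topologies for 1 vcpu(s) 1:1:1 … Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") enumerate every sockets*cores*threads factorization of the vCPU count that fits the limits. A small sketch of that enumeration, not the exact Nova routine:

    # Enumerate candidate CPU topologies: all factorizations of vcpus within the
    # per-dimension limits (effectively unlimited at 65536 in the log above).
    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        topologies.append((sockets, cores, threads))
        return topologies

    print(possible_topologies(1))   # -> [(1, 1, 1)], matching the log above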
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.500138] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 4d320c76-45bb-451c-8fbb-3dd2d64f56d5/4d320c76-45bb-451c-8fbb-3dd2d64f56d5.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 799.500377] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 799.500627] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-10f87fbe-4f1d-4df8-bd39-82e811a5e450 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.508107] env[69328]: DEBUG oslo_vmware.api [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 799.508107] env[69328]: value = "task-3273244" [ 799.508107] env[69328]: _type = "Task" [ 799.508107] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.517150] env[69328]: DEBUG oslo_vmware.api [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273244, 'name': ExtendVirtualDisk_Task} progress is 0%. 
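"Extending root virtual disk to 1048576" above is the m1.nano flavor's 1 GiB root disk expressed in KiB, applied after the cached VMDK has been copied into the instance directory. The arithmetic:

    # 1 GiB root disk -> 1048576 KiB, the value seen in the ExtendVirtualDisk records.
    def root_disk_size_kb(root_gb: int) -> int:
        return root_gb * 1024 * 1024

    assert root_disk_size_kb(1) == 1048576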
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.811247] env[69328]: DEBUG nova.compute.manager [req-73a0f9a7-7b17-4c84-82ae-705c26a6ecea req-b929d671-2fa1-4103-8d61-d64c8c59b55c service nova] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Received event network-vif-plugged-87b2e37a-d778-4bd1-a107-6132378b5f4c {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 799.811567] env[69328]: DEBUG oslo_concurrency.lockutils [req-73a0f9a7-7b17-4c84-82ae-705c26a6ecea req-b929d671-2fa1-4103-8d61-d64c8c59b55c service nova] Acquiring lock "55f44102-2891-4b6c-b31e-e8255a24d180-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 799.811870] env[69328]: DEBUG oslo_concurrency.lockutils [req-73a0f9a7-7b17-4c84-82ae-705c26a6ecea req-b929d671-2fa1-4103-8d61-d64c8c59b55c service nova] Lock "55f44102-2891-4b6c-b31e-e8255a24d180-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 799.812124] env[69328]: DEBUG oslo_concurrency.lockutils [req-73a0f9a7-7b17-4c84-82ae-705c26a6ecea req-b929d671-2fa1-4103-8d61-d64c8c59b55c service nova] Lock "55f44102-2891-4b6c-b31e-e8255a24d180-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 799.812392] env[69328]: DEBUG nova.compute.manager [req-73a0f9a7-7b17-4c84-82ae-705c26a6ecea req-b929d671-2fa1-4103-8d61-d64c8c59b55c service nova] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] No waiting events found dispatching network-vif-plugged-87b2e37a-d778-4bd1-a107-6132378b5f4c {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 799.812601] env[69328]: WARNING nova.compute.manager [req-73a0f9a7-7b17-4c84-82ae-705c26a6ecea req-b929d671-2fa1-4103-8d61-d64c8c59b55c service nova] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Received unexpected event network-vif-plugged-87b2e37a-d778-4bd1-a107-6132378b5f4c for instance with vm_state building and task_state spawning. 
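The network-vif-plugged handling above (pop the waiting event, find none, warn that it was unexpected) follows a register-then-wait pattern: a builder registers interest in an event before plugging the VIF, and the Neutron notification path wakes the waiter. A rough analogue using threading primitives; an illustration only, not Nova's InstanceEvents implementation:

    # Rough analogue of per-instance event bookkeeping: notifications that arrive
    # with no registered waiter are reported as unexpected, as in the WARNING above.
    import threading

    _waiters = {}   # (instance_uuid, event_tag) -> threading.Event

    def prepare_for_event(instance_uuid, event_tag):
        event = threading.Event()
        _waiters[(instance_uuid, event_tag)] = event
        return event

    def dispatch_event(instance_uuid, event_tag):
        event = _waiters.pop((instance_uuid, event_tag), None)
        if event is None:
            print(f"WARNING: unexpected event {event_tag} for {instance_uuid}")
            return
        event.set()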
[ 799.908081] env[69328]: DEBUG nova.network.neutron [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Successfully updated port: 87b2e37a-d778-4bd1-a107-6132378b5f4c {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 799.921360] env[69328]: DEBUG oslo_concurrency.lockutils [None req-dff6540d-b8fa-4825-b027-b626caa9df94 tempest-SecurityGroupsTestJSON-1550109346 tempest-SecurityGroupsTestJSON-1550109346-project-member] Lock "d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.641s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 799.956309] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Acquiring lock "e5a2de79-cfbc-4d9c-8b58-5aa819657978" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 799.956569] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Lock "e5a2de79-cfbc-4d9c-8b58-5aa819657978" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 800.017980] env[69328]: DEBUG oslo_vmware.api [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273244, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069765} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.020666] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 800.021732] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f98b606-c4ed-4e43-8fb1-3b51e51e0a3d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.046263] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Reconfiguring VM instance instance-00000032 to attach disk [datastore2] 4d320c76-45bb-451c-8fbb-3dd2d64f56d5/4d320c76-45bb-451c-8fbb-3dd2d64f56d5.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 800.049419] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-371e6aa7-55fe-4617-98cd-63f70da69f24 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.073023] env[69328]: DEBUG oslo_vmware.api [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 800.073023] env[69328]: value = "task-3273245" [ 800.073023] env[69328]: _type = "Task" [ 800.073023] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.084817] env[69328]: DEBUG oslo_vmware.api [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273245, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.333268] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca7c80bc-e077-4cb9-958c-bd229898aec6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.342263] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80ca269a-14e2-46c9-af15-cba8f848577c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.372981] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-265ba34f-27a2-4bcb-bef1-3f43c81e06c9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.385028] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8da29176-ea8b-4d23-8110-195acfdba173 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.400283] env[69328]: DEBUG nova.compute.provider_tree [None req-cce60ede-7212-4a64-a3f5-72aa1be1edc1 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 800.414013] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "refresh_cache-55f44102-2891-4b6c-b31e-e8255a24d180" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 800.414013] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquired lock "refresh_cache-55f44102-2891-4b6c-b31e-e8255a24d180" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 800.414249] env[69328]: DEBUG nova.network.neutron [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 800.468702] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 800.468798] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-52a61e6d-4476-415b-82e8-ce0930dd89e9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.477053] env[69328]: DEBUG oslo_vmware.api [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the 
task: (returnval){ [ 800.477053] env[69328]: value = "task-3273246" [ 800.477053] env[69328]: _type = "Task" [ 800.477053] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.490599] env[69328]: DEBUG oslo_vmware.api [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273246, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.583064] env[69328]: DEBUG oslo_vmware.api [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273245, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.903156] env[69328]: DEBUG nova.scheduler.client.report [None req-cce60ede-7212-4a64-a3f5-72aa1be1edc1 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 800.949364] env[69328]: DEBUG nova.network.neutron [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 800.988433] env[69328]: DEBUG oslo_vmware.api [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273246, 'name': PowerOffVM_Task, 'duration_secs': 0.210407} completed successfully. 
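"Inventory has not changed for provider …" above means the freshly computed inventory matched the cached copy, so no update needs to be sent to placement. A sketch of that comparison, reusing the inventory shape from the log:

    # Skip the placement update when the recomputed inventory equals the cached one.
    cached_inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116,
                    'step_size': 1, 'allocation_ratio': 1.0},
    }

    def inventory_changed(cached: dict, fresh: dict) -> bool:
        return cached != fresh          # nested dict equality is sufficient here

    if not inventory_changed(cached_inventory, dict(cached_inventory)):
        print("Inventory has not changed; skipping the update")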
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.990823] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 800.991085] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 800.991893] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83443609-b6ae-4214-9bbf-7d4beb104e89 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.999461] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 801.000532] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6b2422cb-df86-4663-a001-8cdfc8b2ae19 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.085778] env[69328]: DEBUG oslo_vmware.api [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273245, 'name': ReconfigVM_Task, 'duration_secs': 0.677258} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.085930] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Reconfigured VM instance instance-00000032 to attach disk [datastore2] 4d320c76-45bb-451c-8fbb-3dd2d64f56d5/4d320c76-45bb-451c-8fbb-3dd2d64f56d5.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 801.086708] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cfda7e09-4ac4-4e94-b3aa-6f056e0b7fcc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.098532] env[69328]: DEBUG oslo_vmware.api [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 801.098532] env[69328]: value = "task-3273248" [ 801.098532] env[69328]: _type = "Task" [ 801.098532] env[69328]: } to complete. 
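The rebuild teardown above runs in a fixed order: power the VM off, unregister it from vCenter, then delete its datastore directory. A sketch of that ordering, with the vCenter calls passed in as hypothetical stand-in callables (PowerOffVM_Task, UnregisterVM and DeleteDatastoreFile_Task in the log):

    # Teardown ordering for a rebuild; the three callables are hypothetical
    # stand-ins for the corresponding vCenter tasks seen in the records above.
    def destroy_for_rebuild(vm, datastore_dir, power_off, unregister, delete_dir):
        power_off(vm)              # PowerOffVM_Task: must be off before unregistering
        unregister(vm)             # UnregisterVM: drop it from the vCenter inventory
        delete_dir(datastore_dir)  # DeleteDatastoreFile_Task: remove the VM's files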
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.109215] env[69328]: DEBUG oslo_vmware.api [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273248, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.110643] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 801.111105] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 801.111344] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Deleting the datastore file [datastore1] d10bee67-6294-4537-9ce7-4eedb8361ddc {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 801.111605] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b5fcd883-d263-4556-8520-6012098d5899 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.118956] env[69328]: DEBUG oslo_vmware.api [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 801.118956] env[69328]: value = "task-3273249" [ 801.118956] env[69328]: _type = "Task" [ 801.118956] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.128945] env[69328]: DEBUG oslo_vmware.api [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273249, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.135081] env[69328]: DEBUG nova.network.neutron [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Updating instance_info_cache with network_info: [{"id": "87b2e37a-d778-4bd1-a107-6132378b5f4c", "address": "fa:16:3e:cc:fb:3a", "network": {"id": "ce3bec03-01f9-4ac9-80e4-bfb944c0bb66", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-545997027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87581f423dc64e4fb9fe1d51ebc68597", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87b2e37a-d7", "ovs_interfaceid": "87b2e37a-d778-4bd1-a107-6132378b5f4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 801.407790] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cce60ede-7212-4a64-a3f5-72aa1be1edc1 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.034s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 801.410215] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.709s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 801.413447] env[69328]: INFO nova.compute.claims [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 801.443415] env[69328]: INFO nova.scheduler.client.report [None req-cce60ede-7212-4a64-a3f5-72aa1be1edc1 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Deleted allocations for instance 5292b759-9d1f-486a-b4d6-90519b3ae986 [ 801.609127] env[69328]: DEBUG oslo_vmware.api [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273248, 'name': Rename_Task, 'duration_secs': 0.196621} completed successfully. 
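The instance_info_cache entries above are plain JSON-like structures; the fields most consumers need are the port id, MAC address and fixed IPs. A short example of pulling them out, with values abbreviated from the cache entry in the log:

    # Extract the commonly used fields from one cached network_info entry
    # (structure copied, abbreviated, from the log above).
    vif = {
        "id": "87b2e37a-d778-4bd1-a107-6132378b5f4c",
        "address": "fa:16:3e:cc:fb:3a",
        "network": {"bridge": "br-int",
                    "subnets": [{"ips": [{"address": "192.168.128.8"}]}]},
        "details": {"nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7",
                    "segmentation_id": 706},
        "devname": "tap87b2e37a-d7",
    }

    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]]
    print(vif["id"], vif["address"], fixed_ips)   # port id, MAC, ['192.168.128.8']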
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.609482] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 801.609958] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c365d3ba-0eea-4085-b139-78f3c49d1d83 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.618413] env[69328]: DEBUG oslo_vmware.api [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 801.618413] env[69328]: value = "task-3273250" [ 801.618413] env[69328]: _type = "Task" [ 801.618413] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.631520] env[69328]: DEBUG oslo_vmware.api [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273249, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.209707} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.634677] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 801.634933] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 801.635080] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 801.638405] env[69328]: DEBUG oslo_vmware.api [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273250, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.642383] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Releasing lock "refresh_cache-55f44102-2891-4b6c-b31e-e8255a24d180" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 801.642685] env[69328]: DEBUG nova.compute.manager [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Instance network_info: |[{"id": "87b2e37a-d778-4bd1-a107-6132378b5f4c", "address": "fa:16:3e:cc:fb:3a", "network": {"id": "ce3bec03-01f9-4ac9-80e4-bfb944c0bb66", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-545997027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87581f423dc64e4fb9fe1d51ebc68597", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87b2e37a-d7", "ovs_interfaceid": "87b2e37a-d778-4bd1-a107-6132378b5f4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 801.644351] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cc:fb:3a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1768af3d-3317-4ef5-b484-0c2707d63de7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '87b2e37a-d778-4bd1-a107-6132378b5f4c', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 801.651996] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
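The "Instance VIF info" record above is a straightforward projection of the cached network_info entry: bridge name, MAC, the NSX logical-switch id as an opaque network reference, the port id and the vmxnet3 model. A sketch of that mapping (the shape is taken from the log; this is not Nova's translation code):

    # Map a cached network_info entry onto the VIF-info shape logged above.
    def to_vif_info(vif: dict) -> dict:
        return {
            'network_name': vif['network']['bridge'],                 # 'br-int'
            'mac_address': vif['address'],
            'network_ref': {
                'type': 'OpaqueNetwork',
                'network-id': vif['details']['nsx-logical-switch-id'],
                'network-type': 'nsx.LogicalSwitch',
                'use-external-id': True,
            },
            'iface_id': vif['id'],
            'vif_model': 'vmxnet3',                                   # as logged above
        }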
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 801.651996] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 801.652817] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-17ccf1c6-8c04-4b14-8b74-e3e53cedc003 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.676873] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 801.676873] env[69328]: value = "task-3273251" [ 801.676873] env[69328]: _type = "Task" [ 801.676873] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.686375] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273251, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.901310] env[69328]: DEBUG nova.compute.manager [req-38279119-e9ac-48b3-86c2-046f9deb1675 req-cd48764e-ab73-4f99-a227-e6ca0a6f6730 service nova] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Received event network-changed-87b2e37a-d778-4bd1-a107-6132378b5f4c {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 801.901310] env[69328]: DEBUG nova.compute.manager [req-38279119-e9ac-48b3-86c2-046f9deb1675 req-cd48764e-ab73-4f99-a227-e6ca0a6f6730 service nova] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Refreshing instance network info cache due to event network-changed-87b2e37a-d778-4bd1-a107-6132378b5f4c. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 801.901310] env[69328]: DEBUG oslo_concurrency.lockutils [req-38279119-e9ac-48b3-86c2-046f9deb1675 req-cd48764e-ab73-4f99-a227-e6ca0a6f6730 service nova] Acquiring lock "refresh_cache-55f44102-2891-4b6c-b31e-e8255a24d180" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.901310] env[69328]: DEBUG oslo_concurrency.lockutils [req-38279119-e9ac-48b3-86c2-046f9deb1675 req-cd48764e-ab73-4f99-a227-e6ca0a6f6730 service nova] Acquired lock "refresh_cache-55f44102-2891-4b6c-b31e-e8255a24d180" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 801.901310] env[69328]: DEBUG nova.network.neutron [req-38279119-e9ac-48b3-86c2-046f9deb1675 req-cd48764e-ab73-4f99-a227-e6ca0a6f6730 service nova] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Refreshing network info cache for port 87b2e37a-d778-4bd1-a107-6132378b5f4c {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 801.953071] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cce60ede-7212-4a64-a3f5-72aa1be1edc1 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Lock "5292b759-9d1f-486a-b4d6-90519b3ae986" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.064s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 802.130356] env[69328]: DEBUG oslo_vmware.api [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273250, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.192108] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273251, 'name': CreateVM_Task, 'duration_secs': 0.363381} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.192316] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 802.193059] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.193243] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 802.193606] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 802.193916] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-beb3974e-5729-4227-8120-1e3cc03ce9ea {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.200653] env[69328]: DEBUG oslo_vmware.api [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 802.200653] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e9649d-e42f-bdff-6846-911776309e42" [ 802.200653] env[69328]: _type = "Task" [ 802.200653] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.210383] env[69328]: DEBUG oslo_vmware.api [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e9649d-e42f-bdff-6846-911776309e42, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.634109] env[69328]: DEBUG oslo_vmware.api [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273250, 'name': PowerOnVM_Task, 'duration_secs': 0.6204} completed successfully. 
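The SearchDatastore_Task calls under the image-cache lock above act as an existence check on the cached VMDK before deciding whether it has to be downloaded again. A sketch of that check-then-copy decision, with exists/fetch/copy as hypothetical stand-ins for the datastore and Glance calls:

    # Fetch the image into the cache only on a miss, then copy it for the instance.
    def ensure_instance_disk(cache_vmdk, instance_vmdk, exists, fetch, copy):
        if not exists(cache_vmdk):        # SearchDatastore_Task on the cache folder
            fetch(cache_vmdk)             # download from Glance into the cache
        copy(cache_vmdk, instance_vmdk)   # CopyVirtualDisk_Task into the instance dir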
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.634422] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 802.634619] env[69328]: INFO nova.compute.manager [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Took 8.00 seconds to spawn the instance on the hypervisor. [ 802.635180] env[69328]: DEBUG nova.compute.manager [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 802.636028] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c291bd4a-c72f-4e70-a563-12becfa60720 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.684286] env[69328]: DEBUG nova.virt.hardware [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 802.684855] env[69328]: DEBUG nova.virt.hardware [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 802.684855] env[69328]: DEBUG nova.virt.hardware [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 802.684855] env[69328]: DEBUG nova.virt.hardware [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 802.685159] env[69328]: DEBUG nova.virt.hardware [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 
tempest-ServerDiskConfigTestJSON-214284543-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 802.685422] env[69328]: DEBUG nova.virt.hardware [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 802.685695] env[69328]: DEBUG nova.virt.hardware [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 802.685854] env[69328]: DEBUG nova.virt.hardware [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 802.686040] env[69328]: DEBUG nova.virt.hardware [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 802.686192] env[69328]: DEBUG nova.virt.hardware [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 802.686363] env[69328]: DEBUG nova.virt.hardware [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 802.687239] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7f81892-d33b-4f77-a3d1-f3f05296236c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.701153] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-285bef17-55b7-457f-a784-51217adbd17b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.722758] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:39:34', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'abcf0d10-3f3f-45dc-923e-1c78766e2dad', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7e9163b1-a349-4287-bbfe-8147dc2e52dd', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 802.730043] env[69328]: DEBUG 
oslo.service.backend.eventlet.loopingcall [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 802.736418] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 802.736744] env[69328]: DEBUG oslo_vmware.api [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e9649d-e42f-bdff-6846-911776309e42, 'name': SearchDatastore_Task, 'duration_secs': 0.0201} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.737688] env[69328]: DEBUG nova.network.neutron [req-38279119-e9ac-48b3-86c2-046f9deb1675 req-cd48764e-ab73-4f99-a227-e6ca0a6f6730 service nova] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Updated VIF entry in instance network info cache for port 87b2e37a-d778-4bd1-a107-6132378b5f4c. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 802.740370] env[69328]: DEBUG nova.network.neutron [req-38279119-e9ac-48b3-86c2-046f9deb1675 req-cd48764e-ab73-4f99-a227-e6ca0a6f6730 service nova] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Updating instance_info_cache with network_info: [{"id": "87b2e37a-d778-4bd1-a107-6132378b5f4c", "address": "fa:16:3e:cc:fb:3a", "network": {"id": "ce3bec03-01f9-4ac9-80e4-bfb944c0bb66", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-545997027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87581f423dc64e4fb9fe1d51ebc68597", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87b2e37a-d7", "ovs_interfaceid": "87b2e37a-d778-4bd1-a107-6132378b5f4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.740370] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a89dacb0-c764-4d5c-bc55-12b81275a779 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.753807] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 802.754075] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 802.754497] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.754497] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 802.754723] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 802.758091] env[69328]: DEBUG oslo_concurrency.lockutils [req-38279119-e9ac-48b3-86c2-046f9deb1675 req-cd48764e-ab73-4f99-a227-e6ca0a6f6730 service nova] Releasing lock "refresh_cache-55f44102-2891-4b6c-b31e-e8255a24d180" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 802.758479] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e2302ef4-8178-4e55-b1d2-10bb367550b3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.767560] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 802.767560] env[69328]: value = "task-3273252" [ 802.767560] env[69328]: _type = "Task" [ 802.767560] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.769073] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 802.769294] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Folder [datastore2] devstack-image-cache_base created. 
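Before touching the image cache, the request above takes locks named after the cached image path and the cached .vmdk ("[datastore2] devstack-image-cache_base/<image-id>...") plus an external semaphore with the same name, creates the devstack-image-cache_base folder, and only then goes looking for the cached disk. A minimal sketch of that per-image serialization using oslo.concurrency's lockutils.lock (which the lockutils frames above point at); path_exists, make_directory and fetch_image are hypothetical stand-ins for the datastore-browser and transfer helpers, the hypothetical make_directory is assumed to tolerate an already-existing folder, and the external semaphore is not modelled:

```python
from oslo_concurrency import lockutils


def ensure_cached_image(datastore, image_id, path_exists, make_directory, fetch_image):
    """Serialize cache preparation per image so concurrent spawns on the
    same host do not download the same VMDK twice.

    path_exists, make_directory and fetch_image are hypothetical callables.
    """
    cache_dir = "[%s] devstack-image-cache_base" % datastore
    cached_vmdk = "%s/%s/%s.vmdk" % (cache_dir, image_id, image_id)

    with lockutils.lock(cached_vmdk):
        # The cache directory is created up front, as in the entries above.
        make_directory(cache_dir)
        if not path_exists(cached_vmdk):
            fetch_image(image_id, cached_vmdk)
    return cached_vmdk
```

Holding the lock across both the existence check and the fetch is the point: a second request for the same image blocks on the lock and then finds the VMDK already in place.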
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 802.773386] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78399994-bf3e-41c0-8dae-a4eacb99e250 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.783919] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273252, 'name': CreateVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.785340] env[69328]: DEBUG oslo_vmware.api [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 802.785340] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d4a0f0-d6f0-a918-6b92-283d38f95a65" [ 802.785340] env[69328]: _type = "Task" [ 802.785340] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.798837] env[69328]: DEBUG oslo_vmware.api [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d4a0f0-d6f0-a918-6b92-283d38f95a65, 'name': SearchDatastore_Task, 'duration_secs': 0.01204} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.799816] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb10f745-921d-4853-b1dd-8bfddc242cdc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.807198] env[69328]: DEBUG oslo_vmware.api [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 802.807198] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5258eb59-d12e-93f6-82a3-bd18aa838b99" [ 802.807198] env[69328]: _type = "Task" [ 802.807198] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.817370] env[69328]: DEBUG oslo_vmware.api [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5258eb59-d12e-93f6-82a3-bd18aa838b99, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.910195] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f114533f-c51a-4b99-b0fd-98e3b94ce6ea {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.918806] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cb7de47-45eb-4866-9d33-330d3d1f1052 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.950492] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b362659-2e4e-49fc-82ec-7c5e91b25ede {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.958585] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4113bb2-a022-4720-8294-43a7c5a16b5d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.973023] env[69328]: DEBUG nova.compute.provider_tree [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 803.158259] env[69328]: INFO nova.compute.manager [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Took 38.63 seconds to build instance. [ 803.280347] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273252, 'name': CreateVM_Task, 'duration_secs': 0.340488} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.280535] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 803.281250] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.281421] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 803.281898] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 803.281989] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5fd8ed14-c856-40cd-8636-cafb853bba6d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.286869] env[69328]: DEBUG oslo_vmware.api [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 803.286869] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b890d5-eaee-3533-b99d-5a0dcf473ab5" [ 803.286869] env[69328]: _type = "Task" [ 803.286869] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.295467] env[69328]: DEBUG oslo_vmware.api [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b890d5-eaee-3533-b99d-5a0dcf473ab5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.317143] env[69328]: DEBUG oslo_vmware.api [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5258eb59-d12e-93f6-82a3-bd18aa838b99, 'name': SearchDatastore_Task, 'duration_secs': 0.011403} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.317445] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 803.317694] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 55f44102-2891-4b6c-b31e-e8255a24d180/55f44102-2891-4b6c-b31e-e8255a24d180.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 803.317957] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e9d7716f-10e8-4e1a-ac15-99be6acad434 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.324848] env[69328]: DEBUG oslo_vmware.api [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 803.324848] env[69328]: value = "task-3273253" [ 803.324848] env[69328]: _type = "Task" [ 803.324848] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.333111] env[69328]: DEBUG oslo_vmware.api [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273253, 'name': CopyVirtualDisk_Task} progress is 0%. 
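The copy above lays the cached image VMDK down as [datastore2] 55f44102-.../55f44102-....vmdk, and a few entries further down the same request extends that root disk to 1048576, which is 1 GiB written in KiB (1 × 1024 × 1024) and consistent with a root_gb=1 flavor such as the m1.nano shown earlier in these entries. A minimal sketch of that copy-then-extend ordering; copy_virtual_disk and extend_virtual_disk are hypothetical callables standing in for the VirtualDiskManager CopyVirtualDisk_Task / ExtendVirtualDisk_Task calls, and the KiB unit is inferred from the magnitude of the logged value:

```python
def prepare_root_disk(datastore, image_id, instance_uuid, root_gb,
                      copy_virtual_disk, extend_virtual_disk):
    """Copy the cached VMDK into the instance folder, then grow it to the
    flavor's root disk size, mirroring the CopyVirtualDisk_Task and
    ExtendVirtualDisk_Task pair in the surrounding entries.

    copy_virtual_disk and extend_virtual_disk are hypothetical callables.
    """
    cached = "[%s] devstack-image-cache_base/%s/%s.vmdk" % (
        datastore, image_id, image_id)
    root = "[%s] %s/%s.vmdk" % (datastore, instance_uuid, instance_uuid)

    copy_virtual_disk(cached, root)

    # root_gb expressed in KiB: for root_gb=1 this is 1 * 1024 * 1024
    # = 1048576, the figure logged by 'Extending root virtual disk to ...'.
    requested_kib = root_gb * 1024 * 1024
    extend_virtual_disk(root, requested_kib)
    return root
```

Only the path layout, the ordering and the unit conversion are taken from the log; error handling and any short-circuit for an already large enough disk are left out.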
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.478528] env[69328]: DEBUG nova.scheduler.client.report [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 803.547207] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9cbdacf4-f698-4c2d-b07c-3d109d551393 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Acquiring lock "afa25f89-ccda-4b77-aaa1-a3b62b53d870" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 803.547516] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9cbdacf4-f698-4c2d-b07c-3d109d551393 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Lock "afa25f89-ccda-4b77-aaa1-a3b62b53d870" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 803.547734] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9cbdacf4-f698-4c2d-b07c-3d109d551393 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Acquiring lock "afa25f89-ccda-4b77-aaa1-a3b62b53d870-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 803.547944] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9cbdacf4-f698-4c2d-b07c-3d109d551393 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Lock "afa25f89-ccda-4b77-aaa1-a3b62b53d870-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 803.548128] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9cbdacf4-f698-4c2d-b07c-3d109d551393 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Lock "afa25f89-ccda-4b77-aaa1-a3b62b53d870-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 803.551150] env[69328]: INFO nova.compute.manager [None req-9cbdacf4-f698-4c2d-b07c-3d109d551393 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Terminating instance [ 803.623505] env[69328]: DEBUG 
oslo_concurrency.lockutils [None req-3d59153c-ee16-4079-8e43-7b01f83b8074 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Acquiring lock "690096cf-a0bd-4db1-ad97-8d8a37ad7c84" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 803.623860] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3d59153c-ee16-4079-8e43-7b01f83b8074 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Lock "690096cf-a0bd-4db1-ad97-8d8a37ad7c84" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 803.624113] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3d59153c-ee16-4079-8e43-7b01f83b8074 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Acquiring lock "690096cf-a0bd-4db1-ad97-8d8a37ad7c84-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 803.624330] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3d59153c-ee16-4079-8e43-7b01f83b8074 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Lock "690096cf-a0bd-4db1-ad97-8d8a37ad7c84-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 803.624530] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3d59153c-ee16-4079-8e43-7b01f83b8074 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Lock "690096cf-a0bd-4db1-ad97-8d8a37ad7c84-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 803.626907] env[69328]: INFO nova.compute.manager [None req-3d59153c-ee16-4079-8e43-7b01f83b8074 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Terminating instance [ 803.662256] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5bf3a9bc-b8a4-4c07-9ef3-1d0d8a5ec775 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "4d320c76-45bb-451c-8fbb-3dd2d64f56d5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 80.503s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 803.797556] env[69328]: DEBUG oslo_vmware.api [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b890d5-eaee-3533-b99d-5a0dcf473ab5, 'name': SearchDatastore_Task, 'duration_secs': 0.010679} completed successfully. 
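The two "Terminating instance" sequences just above follow the same locking discipline: the manager first takes a lock named after the instance UUID for do_terminate_instance, then briefly holds "<uuid>-events" while clearing any queued external events, and only then starts tearing the instance down. A minimal sketch of that nesting with oslo.concurrency's lockutils; clear_events and shutdown_instance are hypothetical callables:

```python
from oslo_concurrency import lockutils


def terminate_instance(instance_uuid, clear_events, shutdown_instance):
    """Serialize termination per instance and drop pending external events
    first, mirroring the '<uuid>' and '<uuid>-events' acquire/release pairs
    in the entries above.

    clear_events and shutdown_instance are hypothetical callables.
    """
    with lockutils.lock(instance_uuid):
        # Held only long enough to discard the event backlog, which is why
        # the log shows the '-events' lock released after ~0.000s.
        with lockutils.lock("%s-events" % instance_uuid):
            clear_events(instance_uuid)
        shutdown_instance(instance_uuid)
```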
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.797856] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 803.798149] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 803.798401] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.798728] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 803.798837] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 803.799107] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1499c24b-6d13-47e0-a3f5-86d10ab5d1bc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.808623] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 803.808811] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 803.809553] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6c74779-9681-4bd1-96df-73e2eda9a1c8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.815393] env[69328]: DEBUG oslo_vmware.api [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 803.815393] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f338fc-0e21-4fbc-b012-e1857d26a91d" [ 803.815393] env[69328]: _type = "Task" [ 803.815393] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.825719] env[69328]: DEBUG oslo_vmware.api [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f338fc-0e21-4fbc-b012-e1857d26a91d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.833891] env[69328]: DEBUG oslo_vmware.api [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273253, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.477988} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.834150] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 55f44102-2891-4b6c-b31e-e8255a24d180/55f44102-2891-4b6c-b31e-e8255a24d180.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 803.834380] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 803.834637] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-01333a97-a997-443b-b587-a8fdf8db816a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.843123] env[69328]: DEBUG oslo_vmware.api [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 803.843123] env[69328]: value = "task-3273254" [ 803.843123] env[69328]: _type = "Task" [ 803.843123] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.851675] env[69328]: DEBUG oslo_vmware.api [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273254, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.985057] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.574s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 803.985684] env[69328]: DEBUG nova.compute.manager [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 803.989299] env[69328]: DEBUG oslo_concurrency.lockutils [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.154s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 803.990702] env[69328]: INFO nova.compute.claims [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 804.034584] env[69328]: DEBUG nova.compute.manager [req-46dbd7ec-5d3e-4eca-95d8-51ef3748ffb8 req-275c1f55-7532-4cce-b503-cc0ff78bb2e5 service nova] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Received event network-changed-f11b7e60-0d64-4eba-a305-c8a67f80d4b8 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 804.034584] env[69328]: DEBUG nova.compute.manager [req-46dbd7ec-5d3e-4eca-95d8-51ef3748ffb8 req-275c1f55-7532-4cce-b503-cc0ff78bb2e5 service nova] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Refreshing instance network info cache due to event network-changed-f11b7e60-0d64-4eba-a305-c8a67f80d4b8. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 804.034584] env[69328]: DEBUG oslo_concurrency.lockutils [req-46dbd7ec-5d3e-4eca-95d8-51ef3748ffb8 req-275c1f55-7532-4cce-b503-cc0ff78bb2e5 service nova] Acquiring lock "refresh_cache-4d320c76-45bb-451c-8fbb-3dd2d64f56d5" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.034584] env[69328]: DEBUG oslo_concurrency.lockutils [req-46dbd7ec-5d3e-4eca-95d8-51ef3748ffb8 req-275c1f55-7532-4cce-b503-cc0ff78bb2e5 service nova] Acquired lock "refresh_cache-4d320c76-45bb-451c-8fbb-3dd2d64f56d5" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 804.034967] env[69328]: DEBUG nova.network.neutron [req-46dbd7ec-5d3e-4eca-95d8-51ef3748ffb8 req-275c1f55-7532-4cce-b503-cc0ff78bb2e5 service nova] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Refreshing network info cache for port f11b7e60-0d64-4eba-a305-c8a67f80d4b8 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 804.061476] env[69328]: DEBUG nova.compute.manager [None req-9cbdacf4-f698-4c2d-b07c-3d109d551393 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 804.061476] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9cbdacf4-f698-4c2d-b07c-3d109d551393 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 804.061476] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2f84258-bda9-4d48-b249-9acbc07fc883 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.072979] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cbdacf4-f698-4c2d-b07c-3d109d551393 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 804.073618] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2fa6e896-16be-49f0-ad5f-8b3431e2d8fd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.081765] env[69328]: DEBUG oslo_vmware.api [None req-9cbdacf4-f698-4c2d-b07c-3d109d551393 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Waiting for the task: (returnval){ [ 804.081765] env[69328]: value = "task-3273255" [ 804.081765] env[69328]: _type = "Task" [ 804.081765] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.090628] env[69328]: DEBUG oslo_vmware.api [None req-9cbdacf4-f698-4c2d-b07c-3d109d551393 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273255, 'name': PowerOffVM_Task} progress is 0%. 
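The req-46dbd7ec service entries above show how an external network-changed-<port-id> event is absorbed: the handler takes a "refresh_cache-<uuid>" lock for the affected instance and then refreshes the network info cache for just that port. A small sketch of the same idea; cache here is a plain in-memory dict and get_port_info a hypothetical Neutron lookup, whereas the real cache is the database-backed instance_info_cache:

```python
from oslo_concurrency import lockutils


def handle_network_changed(instance_uuid, port_id, cache, get_port_info):
    """Refresh one port's entry in an instance's network info cache under a
    per-instance 'refresh_cache-<uuid>' lock, as the entries above do.

    cache is a dict {instance_uuid: {port_id: info}} and get_port_info is a
    hypothetical callable querying Neutron for the port's current state.
    """
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        fresh = get_port_info(port_id)
        cache.setdefault(instance_uuid, {})[port_id] = fresh
    return fresh
```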
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.131591] env[69328]: DEBUG nova.compute.manager [None req-3d59153c-ee16-4079-8e43-7b01f83b8074 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 804.131806] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3d59153c-ee16-4079-8e43-7b01f83b8074 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 804.132916] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfe5b2e2-3333-46cb-a48d-4cd50dd6d14e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.146309] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d59153c-ee16-4079-8e43-7b01f83b8074 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 804.146547] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-03f05783-7ab1-4308-861d-aa1f3bd62633 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.153978] env[69328]: DEBUG oslo_vmware.api [None req-3d59153c-ee16-4079-8e43-7b01f83b8074 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Waiting for the task: (returnval){ [ 804.153978] env[69328]: value = "task-3273256" [ 804.153978] env[69328]: _type = "Task" [ 804.153978] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.162549] env[69328]: DEBUG oslo_vmware.api [None req-3d59153c-ee16-4079-8e43-7b01f83b8074 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273256, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.167245] env[69328]: DEBUG nova.compute.manager [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 804.326448] env[69328]: DEBUG oslo_vmware.api [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f338fc-0e21-4fbc-b012-e1857d26a91d, 'name': SearchDatastore_Task, 'duration_secs': 0.010407} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.327340] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5be71a3-dee6-418b-834e-856f8e6d2ece {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.333413] env[69328]: DEBUG oslo_vmware.api [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 804.333413] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d6985f-8250-7818-2b13-514753f40de4" [ 804.333413] env[69328]: _type = "Task" [ 804.333413] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.341480] env[69328]: DEBUG oslo_vmware.api [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d6985f-8250-7818-2b13-514753f40de4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.360774] env[69328]: DEBUG oslo_vmware.api [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273254, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069114} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.363075] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 804.364026] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa9b9fcb-463e-47f7-be59-d311537668b7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.389438] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Reconfiguring VM instance instance-00000033 to attach disk [datastore2] 55f44102-2891-4b6c-b31e-e8255a24d180/55f44102-2891-4b6c-b31e-e8255a24d180.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 804.390105] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4265a0a2-3c9e-4a21-a433-494bfd679c29 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.412606] env[69328]: DEBUG oslo_vmware.api [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 804.412606] env[69328]: value = "task-3273257" [ 804.412606] env[69328]: _type = "Task" [ 804.412606] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.422304] env[69328]: DEBUG oslo_vmware.api [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273257, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.497246] env[69328]: DEBUG nova.compute.utils [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 804.501421] env[69328]: DEBUG nova.compute.manager [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 804.501613] env[69328]: DEBUG nova.network.neutron [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 804.566967] env[69328]: DEBUG nova.policy [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '821ff43b57bc49d59577880034311de6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '83d21e3b1459418d86eb52fbb6bb0889', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 804.596802] env[69328]: DEBUG oslo_vmware.api [None req-9cbdacf4-f698-4c2d-b07c-3d109d551393 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273255, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.664728] env[69328]: DEBUG oslo_vmware.api [None req-3d59153c-ee16-4079-8e43-7b01f83b8074 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273256, 'name': PowerOffVM_Task, 'duration_secs': 0.496379} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.665043] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d59153c-ee16-4079-8e43-7b01f83b8074 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 804.665244] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3d59153c-ee16-4079-8e43-7b01f83b8074 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 804.665509] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f3489fd7-190b-443a-b21a-0b00a1814ac9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.696821] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 804.757706] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3d59153c-ee16-4079-8e43-7b01f83b8074 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 804.757982] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3d59153c-ee16-4079-8e43-7b01f83b8074 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 804.758135] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d59153c-ee16-4079-8e43-7b01f83b8074 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Deleting the datastore file [datastore2] 690096cf-a0bd-4db1-ad97-8d8a37ad7c84 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 804.758368] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9736063e-9856-48d9-ac11-9f1866142101 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.766854] env[69328]: DEBUG oslo_vmware.api [None req-3d59153c-ee16-4079-8e43-7b01f83b8074 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Waiting for the task: (returnval){ [ 804.766854] env[69328]: value = "task-3273259" [ 804.766854] env[69328]: _type = "Task" [ 804.766854] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.776339] env[69328]: DEBUG oslo_vmware.api [None req-3d59153c-ee16-4079-8e43-7b01f83b8074 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273259, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.849884] env[69328]: DEBUG oslo_vmware.api [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d6985f-8250-7818-2b13-514753f40de4, 'name': SearchDatastore_Task, 'duration_secs': 0.009978} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.849884] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 804.849884] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] d10bee67-6294-4537-9ce7-4eedb8361ddc/d10bee67-6294-4537-9ce7-4eedb8361ddc.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 804.850044] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eef96bed-ab52-41a8-9f46-6431b64e29e7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.858397] env[69328]: DEBUG oslo_vmware.api [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 804.858397] env[69328]: value = "task-3273260" [ 804.858397] env[69328]: _type = "Task" [ 804.858397] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.868173] env[69328]: DEBUG oslo_vmware.api [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273260, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.919876] env[69328]: DEBUG nova.network.neutron [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Successfully created port: e638f147-afff-45b0-bf3e-f63debcc2d53 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 804.926873] env[69328]: DEBUG oslo_vmware.api [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273257, 'name': ReconfigVM_Task, 'duration_secs': 0.297879} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.927319] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Reconfigured VM instance instance-00000033 to attach disk [datastore2] 55f44102-2891-4b6c-b31e-e8255a24d180/55f44102-2891-4b6c-b31e-e8255a24d180.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 804.928059] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2fce2b21-cf35-4352-92b3-38158f2f3692 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.938374] env[69328]: DEBUG oslo_vmware.api [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 804.938374] env[69328]: value = "task-3273261" [ 804.938374] env[69328]: _type = "Task" [ 804.938374] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.948254] env[69328]: DEBUG oslo_vmware.api [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273261, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.967390] env[69328]: DEBUG nova.network.neutron [req-46dbd7ec-5d3e-4eca-95d8-51ef3748ffb8 req-275c1f55-7532-4cce-b503-cc0ff78bb2e5 service nova] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Updated VIF entry in instance network info cache for port f11b7e60-0d64-4eba-a305-c8a67f80d4b8. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 804.967846] env[69328]: DEBUG nova.network.neutron [req-46dbd7ec-5d3e-4eca-95d8-51ef3748ffb8 req-275c1f55-7532-4cce-b503-cc0ff78bb2e5 service nova] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Updating instance_info_cache with network_info: [{"id": "f11b7e60-0d64-4eba-a305-c8a67f80d4b8", "address": "fa:16:3e:8d:96:4e", "network": {"id": "2e8bdd1b-0cca-4f7c-bba3-ff1750073020", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2058593014-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.211", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "088bc9e3aeb449baa0a522342d57d183", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf11b7e60-0d", "ovs_interfaceid": "f11b7e60-0d64-4eba-a305-c8a67f80d4b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.002280] env[69328]: DEBUG nova.compute.manager [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 805.097946] env[69328]: DEBUG oslo_vmware.api [None req-9cbdacf4-f698-4c2d-b07c-3d109d551393 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273255, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.279361] env[69328]: DEBUG oslo_vmware.api [None req-3d59153c-ee16-4079-8e43-7b01f83b8074 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273259, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160496} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.279702] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d59153c-ee16-4079-8e43-7b01f83b8074 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 805.280143] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3d59153c-ee16-4079-8e43-7b01f83b8074 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 805.280143] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3d59153c-ee16-4079-8e43-7b01f83b8074 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 805.280613] env[69328]: INFO nova.compute.manager [None req-3d59153c-ee16-4079-8e43-7b01f83b8074 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Took 1.15 seconds to destroy the instance on the hypervisor. [ 805.280840] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3d59153c-ee16-4079-8e43-7b01f83b8074 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 805.283531] env[69328]: DEBUG nova.compute.manager [-] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 805.284361] env[69328]: DEBUG nova.network.neutron [-] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 805.370387] env[69328]: DEBUG oslo_vmware.api [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273260, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.451739] env[69328]: DEBUG oslo_vmware.api [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273261, 'name': Rename_Task, 'duration_secs': 0.180022} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.452030] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 805.452300] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b80c9f42-1d1c-4fc3-b5bf-c9c2e9dcb9bd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.460289] env[69328]: DEBUG oslo_vmware.api [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 805.460289] env[69328]: value = "task-3273262" [ 805.460289] env[69328]: _type = "Task" [ 805.460289] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.469577] env[69328]: DEBUG oslo_vmware.api [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273262, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.472934] env[69328]: DEBUG oslo_concurrency.lockutils [req-46dbd7ec-5d3e-4eca-95d8-51ef3748ffb8 req-275c1f55-7532-4cce-b503-cc0ff78bb2e5 service nova] Releasing lock "refresh_cache-4d320c76-45bb-451c-8fbb-3dd2d64f56d5" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 805.536414] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5ee868b-a389-4294-ba84-3801b53fffc5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.545318] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc5c0da4-f16e-444f-a71e-127c11966ea1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.591085] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-893d30c8-1294-4a90-8b7e-8ae05eb62093 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.602671] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7702983-0e07-4373-a9b7-5f2b394161ad {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.607362] env[69328]: DEBUG oslo_vmware.api [None req-9cbdacf4-f698-4c2d-b07c-3d109d551393 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273255, 'name': PowerOffVM_Task, 'duration_secs': 1.141869} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.607622] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cbdacf4-f698-4c2d-b07c-3d109d551393 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 805.608411] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9cbdacf4-f698-4c2d-b07c-3d109d551393 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 805.608606] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4c6ea4ef-931a-45af-a8b5-c2e557bc5f89 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.619167] env[69328]: DEBUG nova.compute.provider_tree [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 805.680333] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9cbdacf4-f698-4c2d-b07c-3d109d551393 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 805.680670] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9cbdacf4-f698-4c2d-b07c-3d109d551393 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 805.680746] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cbdacf4-f698-4c2d-b07c-3d109d551393 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Deleting the datastore file [datastore2] afa25f89-ccda-4b77-aaa1-a3b62b53d870 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 805.681297] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3df51ee3-5013-4009-93da-c7dd63c151e8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.689891] env[69328]: DEBUG oslo_vmware.api [None req-9cbdacf4-f698-4c2d-b07c-3d109d551393 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Waiting for the task: (returnval){ [ 805.689891] env[69328]: value = "task-3273264" [ 805.689891] env[69328]: _type = "Task" [ 805.689891] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.700491] env[69328]: DEBUG oslo_vmware.api [None req-9cbdacf4-f698-4c2d-b07c-3d109d551393 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273264, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.759969] env[69328]: DEBUG nova.compute.manager [req-1557b984-787c-4e30-a96d-e887677ef774 req-f4f0949d-dd89-4337-9d95-113ee24dbf8f service nova] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Received event network-vif-deleted-7f5ae309-7210-4bdd-8fd5-67e654004662 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 805.760968] env[69328]: INFO nova.compute.manager [req-1557b984-787c-4e30-a96d-e887677ef774 req-f4f0949d-dd89-4337-9d95-113ee24dbf8f service nova] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Neutron deleted interface 7f5ae309-7210-4bdd-8fd5-67e654004662; detaching it from the instance and deleting it from the info cache [ 805.760968] env[69328]: DEBUG nova.network.neutron [req-1557b984-787c-4e30-a96d-e887677ef774 req-f4f0949d-dd89-4337-9d95-113ee24dbf8f service nova] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.869984] env[69328]: DEBUG oslo_vmware.api [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273260, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.594655} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.870365] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] d10bee67-6294-4537-9ce7-4eedb8361ddc/d10bee67-6294-4537-9ce7-4eedb8361ddc.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 805.870607] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 805.870849] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cff8c8dc-66b1-4b13-b32d-9ed1f1d6dbd6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.878782] env[69328]: DEBUG oslo_vmware.api [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 805.878782] env[69328]: value = "task-3273265" [ 805.878782] env[69328]: _type = "Task" [ 805.878782] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.887375] env[69328]: DEBUG oslo_vmware.api [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273265, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.974054] env[69328]: DEBUG oslo_vmware.api [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273262, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.019825] env[69328]: DEBUG nova.compute.manager [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 806.052727] env[69328]: DEBUG nova.virt.hardware [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 806.052727] env[69328]: DEBUG nova.virt.hardware [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 806.052727] env[69328]: DEBUG nova.virt.hardware [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 806.052957] env[69328]: DEBUG nova.virt.hardware [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 806.052957] env[69328]: DEBUG nova.virt.hardware [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Image pref 0:0:0 {{(pid=69328) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 806.052957] env[69328]: DEBUG nova.virt.hardware [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 806.052957] env[69328]: DEBUG nova.virt.hardware [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 806.052957] env[69328]: DEBUG nova.virt.hardware [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 806.053208] env[69328]: DEBUG nova.virt.hardware [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 806.057168] env[69328]: DEBUG nova.virt.hardware [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 806.057168] env[69328]: DEBUG nova.virt.hardware [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 806.058018] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4caa60b2-8991-47f4-b0b9-a4d3e0f8f10d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.070241] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d35a194-ecf2-4f36-841e-00440a27b50e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.125186] env[69328]: DEBUG nova.scheduler.client.report [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:958}} [ 806.203914] env[69328]: DEBUG oslo_vmware.api [None req-9cbdacf4-f698-4c2d-b07c-3d109d551393 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Task: {'id': task-3273264, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149391} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.204238] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cbdacf4-f698-4c2d-b07c-3d109d551393 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 806.204425] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9cbdacf4-f698-4c2d-b07c-3d109d551393 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 806.204599] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9cbdacf4-f698-4c2d-b07c-3d109d551393 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 806.204959] env[69328]: INFO nova.compute.manager [None req-9cbdacf4-f698-4c2d-b07c-3d109d551393 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Took 2.15 seconds to destroy the instance on the hypervisor. [ 806.205063] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9cbdacf4-f698-4c2d-b07c-3d109d551393 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 806.205237] env[69328]: DEBUG nova.compute.manager [-] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 806.205323] env[69328]: DEBUG nova.network.neutron [-] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 806.240742] env[69328]: DEBUG nova.network.neutron [-] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.264192] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e5bd7640-ec4f-4354-9573-8b6151f97a24 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.275967] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23b92aff-b1b0-423a-bd13-e3e635e321ea {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.313675] env[69328]: DEBUG nova.compute.manager [req-1557b984-787c-4e30-a96d-e887677ef774 req-f4f0949d-dd89-4337-9d95-113ee24dbf8f service nova] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Detach interface failed, port_id=7f5ae309-7210-4bdd-8fd5-67e654004662, reason: Instance 690096cf-a0bd-4db1-ad97-8d8a37ad7c84 could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 806.391461] env[69328]: DEBUG oslo_vmware.api [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273265, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076606} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.391804] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 806.392680] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cebac9f8-3dea-4b53-88ff-d68ba10edfc7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.418689] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Reconfiguring VM instance instance-00000031 to attach disk [datastore1] d10bee67-6294-4537-9ce7-4eedb8361ddc/d10bee67-6294-4537-9ce7-4eedb8361ddc.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 806.420221] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b0f2b987-2749-4ee6-8302-457220fe6cda {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.443544] env[69328]: DEBUG oslo_vmware.api [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 806.443544] env[69328]: value = "task-3273266" [ 806.443544] env[69328]: _type = "Task" [ 806.443544] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.450108] env[69328]: DEBUG oslo_vmware.api [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273266, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.473430] env[69328]: DEBUG oslo_vmware.api [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273262, 'name': PowerOnVM_Task, 'duration_secs': 0.56158} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.473759] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 806.473974] env[69328]: INFO nova.compute.manager [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Took 7.18 seconds to spawn the instance on the hypervisor. 
[ 806.474176] env[69328]: DEBUG nova.compute.manager [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 806.474985] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c259679f-0809-47b5-b8fd-c4f6c9bdd924 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.631010] env[69328]: DEBUG oslo_concurrency.lockutils [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.642s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 806.632366] env[69328]: DEBUG nova.compute.manager [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 806.636247] env[69328]: DEBUG oslo_concurrency.lockutils [None req-44f3ab11-d601-4dcc-8461-7aed79509981 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.507s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 806.636247] env[69328]: DEBUG nova.objects.instance [None req-44f3ab11-d601-4dcc-8461-7aed79509981 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Lazy-loading 'resources' on Instance uuid 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 806.745592] env[69328]: INFO nova.compute.manager [-] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Took 1.46 seconds to deallocate network for instance. 
[ 806.758470] env[69328]: DEBUG nova.compute.manager [req-14ef4ccd-2317-4f92-9410-ff6631379186 req-d0c54161-e966-4d64-87d5-f774576ee60d service nova] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Received event network-vif-plugged-e638f147-afff-45b0-bf3e-f63debcc2d53 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 806.758845] env[69328]: DEBUG oslo_concurrency.lockutils [req-14ef4ccd-2317-4f92-9410-ff6631379186 req-d0c54161-e966-4d64-87d5-f774576ee60d service nova] Acquiring lock "07b1f872-02bc-471f-97d6-3a781075bee5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 806.759118] env[69328]: DEBUG oslo_concurrency.lockutils [req-14ef4ccd-2317-4f92-9410-ff6631379186 req-d0c54161-e966-4d64-87d5-f774576ee60d service nova] Lock "07b1f872-02bc-471f-97d6-3a781075bee5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 806.759407] env[69328]: DEBUG oslo_concurrency.lockutils [req-14ef4ccd-2317-4f92-9410-ff6631379186 req-d0c54161-e966-4d64-87d5-f774576ee60d service nova] Lock "07b1f872-02bc-471f-97d6-3a781075bee5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 806.760171] env[69328]: DEBUG nova.compute.manager [req-14ef4ccd-2317-4f92-9410-ff6631379186 req-d0c54161-e966-4d64-87d5-f774576ee60d service nova] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] No waiting events found dispatching network-vif-plugged-e638f147-afff-45b0-bf3e-f63debcc2d53 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 806.760269] env[69328]: WARNING nova.compute.manager [req-14ef4ccd-2317-4f92-9410-ff6631379186 req-d0c54161-e966-4d64-87d5-f774576ee60d service nova] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Received unexpected event network-vif-plugged-e638f147-afff-45b0-bf3e-f63debcc2d53 for instance with vm_state building and task_state spawning. [ 806.953710] env[69328]: DEBUG oslo_vmware.api [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273266, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.001965] env[69328]: INFO nova.compute.manager [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Took 36.06 seconds to build instance. [ 807.139407] env[69328]: DEBUG nova.compute.utils [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 807.145459] env[69328]: DEBUG nova.compute.manager [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 807.145677] env[69328]: DEBUG nova.network.neutron [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 807.177621] env[69328]: DEBUG nova.network.neutron [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Successfully updated port: e638f147-afff-45b0-bf3e-f63debcc2d53 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 807.196864] env[69328]: DEBUG nova.network.neutron [-] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.242735] env[69328]: DEBUG nova.policy [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6136d0ef4ca5442eac04fb1193d1fca1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9a0fbd7db3ef4a8bbd6799ef5177f25f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 807.251646] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3d59153c-ee16-4079-8e43-7b01f83b8074 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 807.453663] env[69328]: DEBUG oslo_vmware.api [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273266, 'name': ReconfigVM_Task, 'duration_secs': 0.55105} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.456414] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Reconfigured VM instance instance-00000031 to attach disk [datastore1] d10bee67-6294-4537-9ce7-4eedb8361ddc/d10bee67-6294-4537-9ce7-4eedb8361ddc.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 807.456414] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c3bb8a3d-28b2-4416-bfaa-bdd852f724cb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.465406] env[69328]: DEBUG oslo_vmware.api [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 807.465406] env[69328]: value = "task-3273267" [ 807.465406] env[69328]: _type = "Task" [ 807.465406] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.481835] env[69328]: DEBUG oslo_vmware.api [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273267, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.505781] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3974d7ae-ec59-47bc-9f43-2525f1ea9ccf tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "55f44102-2891-4b6c-b31e-e8255a24d180" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 80.605s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 807.644964] env[69328]: DEBUG nova.compute.manager [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Start building block device mappings for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 807.684110] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Acquiring lock "refresh_cache-07b1f872-02bc-471f-97d6-3a781075bee5" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.684110] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Acquired lock "refresh_cache-07b1f872-02bc-471f-97d6-3a781075bee5" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 807.684110] env[69328]: DEBUG nova.network.neutron [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 807.698845] env[69328]: INFO nova.compute.manager [-] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Took 1.49 seconds to deallocate network for instance. [ 807.725513] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c4fb00-0527-4f13-92b9-2da018acd71b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.732065] env[69328]: DEBUG nova.network.neutron [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Successfully created port: 54919328-7a97-481c-bd6d-056207108b76 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 807.736107] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49883231-17b6-4f04-8d9a-5001212fc896 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.774097] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81bf75f5-e510-4b0b-890b-0d8e43c60db3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.784085] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f831c2b9-a5ed-42bf-aaf3-9778de1ff3c3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.802157] env[69328]: DEBUG nova.compute.provider_tree [None req-44f3ab11-d601-4dcc-8461-7aed79509981 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 807.956571] env[69328]: DEBUG nova.compute.manager [req-d86f0940-907a-4d23-abaa-69932a9d9437 req-96ebfc50-d037-49c0-a7cf-56a5fd21ce25 service nova] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Received event 
network-vif-deleted-b4eaba7b-0335-4344-94ab-94e9d7a355a2 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 807.977532] env[69328]: DEBUG oslo_vmware.api [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273267, 'name': Rename_Task, 'duration_secs': 0.227108} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.977532] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 807.977532] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7e7d83cd-4a0f-4f80-a116-855b26e410c5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.984835] env[69328]: DEBUG oslo_vmware.api [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 807.984835] env[69328]: value = "task-3273268" [ 807.984835] env[69328]: _type = "Task" [ 807.984835] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.995021] env[69328]: DEBUG oslo_vmware.api [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273268, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.009211] env[69328]: DEBUG nova.compute.manager [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 808.208032] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9cbdacf4-f698-4c2d-b07c-3d109d551393 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 808.245149] env[69328]: DEBUG nova.network.neutron [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 808.305667] env[69328]: DEBUG nova.scheduler.client.report [None req-44f3ab11-d601-4dcc-8461-7aed79509981 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 808.407137] env[69328]: DEBUG nova.network.neutron [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Updating instance_info_cache with network_info: [{"id": "e638f147-afff-45b0-bf3e-f63debcc2d53", "address": "fa:16:3e:fe:7f:e3", "network": {"id": "1b79a3b5-fe38-457c-a9e1-5923af4edbf1", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1093544796-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83d21e3b1459418d86eb52fbb6bb0889", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape638f147-af", "ovs_interfaceid": "e638f147-afff-45b0-bf3e-f63debcc2d53", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.428542] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3ec12dc2-be2a-4370-aa34-03498ef00a89 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "55f44102-2891-4b6c-b31e-e8255a24d180" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 808.428913] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3ec12dc2-be2a-4370-aa34-03498ef00a89 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "55f44102-2891-4b6c-b31e-e8255a24d180" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 808.428913] env[69328]: DEBUG nova.compute.manager [None req-3ec12dc2-be2a-4370-aa34-03498ef00a89 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 
55f44102-2891-4b6c-b31e-e8255a24d180] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 808.429807] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3a3711c-58ac-40be-9a73-6866dd6a7d84 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.438797] env[69328]: DEBUG nova.compute.manager [None req-3ec12dc2-be2a-4370-aa34-03498ef00a89 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69328) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 808.439481] env[69328]: DEBUG nova.objects.instance [None req-3ec12dc2-be2a-4370-aa34-03498ef00a89 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lazy-loading 'flavor' on Instance uuid 55f44102-2891-4b6c-b31e-e8255a24d180 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 808.501411] env[69328]: DEBUG oslo_vmware.api [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273268, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.536709] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 808.655793] env[69328]: DEBUG nova.compute.manager [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 808.683016] env[69328]: DEBUG nova.virt.hardware [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 808.683193] env[69328]: DEBUG nova.virt.hardware [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 808.683354] env[69328]: DEBUG nova.virt.hardware [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 808.683574] env[69328]: DEBUG nova.virt.hardware [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 808.683674] env[69328]: DEBUG nova.virt.hardware [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 808.683819] env[69328]: DEBUG nova.virt.hardware [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 808.684038] env[69328]: DEBUG nova.virt.hardware [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 808.684202] env[69328]: DEBUG nova.virt.hardware [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 808.684371] env[69328]: DEBUG 
nova.virt.hardware [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 808.684532] env[69328]: DEBUG nova.virt.hardware [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 808.684703] env[69328]: DEBUG nova.virt.hardware [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 808.685616] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0a43d4f-d5dd-4505-860e-a8a2c1733189 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.695117] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cb44ac6-a706-436e-99e5-d395e55f26b0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.810894] env[69328]: DEBUG oslo_concurrency.lockutils [None req-44f3ab11-d601-4dcc-8461-7aed79509981 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.175s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 808.813787] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.322s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 808.814920] env[69328]: INFO nova.compute.claims [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 808.833126] env[69328]: INFO nova.scheduler.client.report [None req-44f3ab11-d601-4dcc-8461-7aed79509981 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Deleted allocations for instance 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29 [ 808.912528] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Releasing lock "refresh_cache-07b1f872-02bc-471f-97d6-3a781075bee5" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 808.913134] env[69328]: DEBUG nova.compute.manager [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 
tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Instance network_info: |[{"id": "e638f147-afff-45b0-bf3e-f63debcc2d53", "address": "fa:16:3e:fe:7f:e3", "network": {"id": "1b79a3b5-fe38-457c-a9e1-5923af4edbf1", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1093544796-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83d21e3b1459418d86eb52fbb6bb0889", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape638f147-af", "ovs_interfaceid": "e638f147-afff-45b0-bf3e-f63debcc2d53", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 808.913574] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:7f:e3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '40859343-2baa-45fd-88e3-ebf8aaed2b19', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e638f147-afff-45b0-bf3e-f63debcc2d53', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 808.922290] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Creating folder: Project (83d21e3b1459418d86eb52fbb6bb0889). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 808.922969] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-113b35ac-e1d4-4150-a0da-8a1c748c9b20 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.936684] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Created folder: Project (83d21e3b1459418d86eb52fbb6bb0889) in parent group-v653649. [ 808.937063] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Creating folder: Instances. Parent ref: group-v653796. 
{{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 808.937768] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-033f08b4-42f0-4fff-ac81-22ad63c6ae28 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.948585] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Created folder: Instances in parent group-v653796. [ 808.948872] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 808.949138] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 808.950251] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-610f8ffb-0254-44ff-9915-d6a5f3a55a8a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.966129] env[69328]: DEBUG nova.compute.manager [req-b814f927-cdd2-4f5d-9be6-d8c2059fd049 req-e0d268fe-36c9-4ca3-87b7-934484949438 service nova] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Received event network-changed-e638f147-afff-45b0-bf3e-f63debcc2d53 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 808.966338] env[69328]: DEBUG nova.compute.manager [req-b814f927-cdd2-4f5d-9be6-d8c2059fd049 req-e0d268fe-36c9-4ca3-87b7-934484949438 service nova] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Refreshing instance network info cache due to event network-changed-e638f147-afff-45b0-bf3e-f63debcc2d53. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 808.966553] env[69328]: DEBUG oslo_concurrency.lockutils [req-b814f927-cdd2-4f5d-9be6-d8c2059fd049 req-e0d268fe-36c9-4ca3-87b7-934484949438 service nova] Acquiring lock "refresh_cache-07b1f872-02bc-471f-97d6-3a781075bee5" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.966693] env[69328]: DEBUG oslo_concurrency.lockutils [req-b814f927-cdd2-4f5d-9be6-d8c2059fd049 req-e0d268fe-36c9-4ca3-87b7-934484949438 service nova] Acquired lock "refresh_cache-07b1f872-02bc-471f-97d6-3a781075bee5" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 808.966847] env[69328]: DEBUG nova.network.neutron [req-b814f927-cdd2-4f5d-9be6-d8c2059fd049 req-e0d268fe-36c9-4ca3-87b7-934484949438 service nova] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Refreshing network info cache for port e638f147-afff-45b0-bf3e-f63debcc2d53 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 808.975046] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 808.975046] env[69328]: value = "task-3273271" [ 808.975046] env[69328]: _type = "Task" [ 808.975046] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.984567] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273271, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.995442] env[69328]: DEBUG oslo_vmware.api [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273268, 'name': PowerOnVM_Task, 'duration_secs': 0.534074} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.995812] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 808.996070] env[69328]: DEBUG nova.compute.manager [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 808.996872] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c26d3f1f-ff7a-45c4-a32b-f0a25f1be193 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.341971] env[69328]: DEBUG oslo_concurrency.lockutils [None req-44f3ab11-d601-4dcc-8461-7aed79509981 tempest-ListImageFiltersTestJSON-1763724709 tempest-ListImageFiltersTestJSON-1763724709-project-member] Lock "5b0e8bef-dcfc-4c5e-89d2-aa1748050d29" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.413s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 809.447590] env[69328]: DEBUG nova.network.neutron [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Successfully updated port: 54919328-7a97-481c-bd6d-056207108b76 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 809.448896] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ec12dc2-be2a-4370-aa34-03498ef00a89 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 809.449749] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-396f5189-965e-4858-9922-39395d95589f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.462429] env[69328]: DEBUG oslo_vmware.api [None req-3ec12dc2-be2a-4370-aa34-03498ef00a89 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 809.462429] env[69328]: value = "task-3273272" [ 809.462429] env[69328]: _type = "Task" [ 809.462429] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.475041] env[69328]: DEBUG oslo_vmware.api [None req-3ec12dc2-be2a-4370-aa34-03498ef00a89 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273272, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.487785] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273271, 'name': CreateVM_Task, 'duration_secs': 0.393326} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.488477] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 809.489617] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.489881] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 809.490281] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 809.490580] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d905527e-22b7-4766-9809-511fdbd70c47 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.499182] env[69328]: DEBUG oslo_vmware.api [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Waiting for the task: (returnval){ [ 809.499182] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e894c0-ffd4-6d6c-3871-43a9f2f2bb3d" [ 809.499182] env[69328]: _type = "Task" [ 809.499182] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.512677] env[69328]: DEBUG oslo_vmware.api [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e894c0-ffd4-6d6c-3871-43a9f2f2bb3d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.518450] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 809.811459] env[69328]: DEBUG nova.network.neutron [req-b814f927-cdd2-4f5d-9be6-d8c2059fd049 req-e0d268fe-36c9-4ca3-87b7-934484949438 service nova] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Updated VIF entry in instance network info cache for port e638f147-afff-45b0-bf3e-f63debcc2d53. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 809.813228] env[69328]: DEBUG nova.network.neutron [req-b814f927-cdd2-4f5d-9be6-d8c2059fd049 req-e0d268fe-36c9-4ca3-87b7-934484949438 service nova] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Updating instance_info_cache with network_info: [{"id": "e638f147-afff-45b0-bf3e-f63debcc2d53", "address": "fa:16:3e:fe:7f:e3", "network": {"id": "1b79a3b5-fe38-457c-a9e1-5923af4edbf1", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1093544796-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83d21e3b1459418d86eb52fbb6bb0889", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40859343-2baa-45fd-88e3-ebf8aaed2b19", "external-id": "nsx-vlan-transportzone-10", "segmentation_id": 10, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape638f147-af", "ovs_interfaceid": "e638f147-afff-45b0-bf3e-f63debcc2d53", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.956241] env[69328]: DEBUG oslo_concurrency.lockutils [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Acquiring lock "refresh_cache-ef7effe4-b37f-4fab-ad24-9d8f72a47ee2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.956368] env[69328]: DEBUG oslo_concurrency.lockutils [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Acquired lock "refresh_cache-ef7effe4-b37f-4fab-ad24-9d8f72a47ee2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 809.956497] env[69328]: DEBUG nova.network.neutron [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Building network info cache for instance {{(pid=69328) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 809.973216] env[69328]: DEBUG oslo_vmware.api [None req-3ec12dc2-be2a-4370-aa34-03498ef00a89 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273272, 'name': PowerOffVM_Task, 'duration_secs': 0.289702} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.973701] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ec12dc2-be2a-4370-aa34-03498ef00a89 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 809.973701] env[69328]: DEBUG nova.compute.manager [None req-3ec12dc2-be2a-4370-aa34-03498ef00a89 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 809.978284] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-814c160c-901c-4db9-8324-65e0a6b07c9a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.012605] env[69328]: DEBUG oslo_vmware.api [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e894c0-ffd4-6d6c-3871-43a9f2f2bb3d, 'name': SearchDatastore_Task, 'duration_secs': 0.011746} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.013898] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 810.014155] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 810.014401] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 810.014552] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 810.014748] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 810.017742] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3b60f34c-8a96-44e4-8180-0c108a146d37 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.030246] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 810.030246] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 810.031781] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d79fb1b9-6716-46aa-9eb7-62395256ed08 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.042065] env[69328]: DEBUG oslo_vmware.api [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Waiting for the task: (returnval){ [ 810.042065] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]527a9863-b7b1-dff5-5e3e-1cb65256f3dc" [ 810.042065] env[69328]: _type = "Task" [ 810.042065] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.058231] env[69328]: DEBUG oslo_vmware.api [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]527a9863-b7b1-dff5-5e3e-1cb65256f3dc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.066687] env[69328]: DEBUG nova.compute.manager [req-67b32fd1-8bbf-4350-a801-cc811cb3c4fe req-47046f49-58c4-4180-94e0-a0991d157fcc service nova] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Received event network-vif-plugged-54919328-7a97-481c-bd6d-056207108b76 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 810.066901] env[69328]: DEBUG oslo_concurrency.lockutils [req-67b32fd1-8bbf-4350-a801-cc811cb3c4fe req-47046f49-58c4-4180-94e0-a0991d157fcc service nova] Acquiring lock "ef7effe4-b37f-4fab-ad24-9d8f72a47ee2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 810.067125] env[69328]: DEBUG oslo_concurrency.lockutils [req-67b32fd1-8bbf-4350-a801-cc811cb3c4fe req-47046f49-58c4-4180-94e0-a0991d157fcc service nova] Lock "ef7effe4-b37f-4fab-ad24-9d8f72a47ee2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 810.067286] env[69328]: DEBUG oslo_concurrency.lockutils [req-67b32fd1-8bbf-4350-a801-cc811cb3c4fe req-47046f49-58c4-4180-94e0-a0991d157fcc service nova] Lock "ef7effe4-b37f-4fab-ad24-9d8f72a47ee2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 810.067439] env[69328]: DEBUG nova.compute.manager [req-67b32fd1-8bbf-4350-a801-cc811cb3c4fe req-47046f49-58c4-4180-94e0-a0991d157fcc service nova] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] No waiting events found dispatching network-vif-plugged-54919328-7a97-481c-bd6d-056207108b76 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 810.067602] env[69328]: WARNING nova.compute.manager [req-67b32fd1-8bbf-4350-a801-cc811cb3c4fe req-47046f49-58c4-4180-94e0-a0991d157fcc service nova] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Received unexpected event 
network-vif-plugged-54919328-7a97-481c-bd6d-056207108b76 for instance with vm_state building and task_state spawning. [ 810.067788] env[69328]: DEBUG nova.compute.manager [req-67b32fd1-8bbf-4350-a801-cc811cb3c4fe req-47046f49-58c4-4180-94e0-a0991d157fcc service nova] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Received event network-changed-54919328-7a97-481c-bd6d-056207108b76 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 810.067947] env[69328]: DEBUG nova.compute.manager [req-67b32fd1-8bbf-4350-a801-cc811cb3c4fe req-47046f49-58c4-4180-94e0-a0991d157fcc service nova] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Refreshing instance network info cache due to event network-changed-54919328-7a97-481c-bd6d-056207108b76. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 810.068225] env[69328]: DEBUG oslo_concurrency.lockutils [req-67b32fd1-8bbf-4350-a801-cc811cb3c4fe req-47046f49-58c4-4180-94e0-a0991d157fcc service nova] Acquiring lock "refresh_cache-ef7effe4-b37f-4fab-ad24-9d8f72a47ee2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 810.319444] env[69328]: DEBUG oslo_concurrency.lockutils [req-b814f927-cdd2-4f5d-9be6-d8c2059fd049 req-e0d268fe-36c9-4ca3-87b7-934484949438 service nova] Releasing lock "refresh_cache-07b1f872-02bc-471f-97d6-3a781075bee5" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 810.362896] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68f3502b-75ae-4efa-bad4-678559a81a77 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.379026] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ccaf30a-ca79-470e-89ee-ed0114dadc72 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.412931] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b300600-a7bd-4441-95a5-0b77491fa74b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.422599] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e902e399-510d-400b-a7d3-7924141e0952 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.436952] env[69328]: DEBUG nova.compute.provider_tree [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 810.492959] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3ec12dc2-be2a-4370-aa34-03498ef00a89 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "55f44102-2891-4b6c-b31e-e8255a24d180" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.064s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 810.516689] env[69328]: DEBUG nova.network.neutron [None req-04090dd8-8bbb-4b66-8c84-61207736831f 
tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 810.521065] env[69328]: DEBUG oslo_concurrency.lockutils [None req-03b5f61e-b980-4d3c-9522-27aa6b2948a0 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "d10bee67-6294-4537-9ce7-4eedb8361ddc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 810.521328] env[69328]: DEBUG oslo_concurrency.lockutils [None req-03b5f61e-b980-4d3c-9522-27aa6b2948a0 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "d10bee67-6294-4537-9ce7-4eedb8361ddc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 810.521538] env[69328]: DEBUG oslo_concurrency.lockutils [None req-03b5f61e-b980-4d3c-9522-27aa6b2948a0 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "d10bee67-6294-4537-9ce7-4eedb8361ddc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 810.521726] env[69328]: DEBUG oslo_concurrency.lockutils [None req-03b5f61e-b980-4d3c-9522-27aa6b2948a0 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "d10bee67-6294-4537-9ce7-4eedb8361ddc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 810.521983] env[69328]: DEBUG oslo_concurrency.lockutils [None req-03b5f61e-b980-4d3c-9522-27aa6b2948a0 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "d10bee67-6294-4537-9ce7-4eedb8361ddc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 810.530571] env[69328]: INFO nova.compute.manager [None req-03b5f61e-b980-4d3c-9522-27aa6b2948a0 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Terminating instance [ 810.554586] env[69328]: DEBUG oslo_vmware.api [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]527a9863-b7b1-dff5-5e3e-1cb65256f3dc, 'name': SearchDatastore_Task, 'duration_secs': 0.013703} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.555397] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e898b4d6-9a3a-45d9-9ef7-1f4072215621 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.560903] env[69328]: DEBUG oslo_vmware.api [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Waiting for the task: (returnval){ [ 810.560903] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e209ae-9d0f-9a1d-82aa-67b864c05da1" [ 810.560903] env[69328]: _type = "Task" [ 810.560903] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.569777] env[69328]: DEBUG oslo_vmware.api [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e209ae-9d0f-9a1d-82aa-67b864c05da1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.723802] env[69328]: DEBUG nova.network.neutron [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Updating instance_info_cache with network_info: [{"id": "54919328-7a97-481c-bd6d-056207108b76", "address": "fa:16:3e:dd:51:9d", "network": {"id": "134bd2c0-e2fc-4db8-b47f-3f0cefb9842c", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1713699500-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a0fbd7db3ef4a8bbd6799ef5177f25f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd0eb882-ab95-4373-aa20-ee565a9919e3", "external-id": "nsx-vlan-transportzone-510", "segmentation_id": 510, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54919328-7a", "ovs_interfaceid": "54919328-7a97-481c-bd6d-056207108b76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 810.944022] env[69328]: DEBUG nova.scheduler.client.report [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 
'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 811.035764] env[69328]: DEBUG nova.compute.manager [None req-03b5f61e-b980-4d3c-9522-27aa6b2948a0 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 811.036080] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-03b5f61e-b980-4d3c-9522-27aa6b2948a0 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 811.036999] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e7eb1bf-d197-49ab-af6d-b807b7cb41c9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.046053] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-03b5f61e-b980-4d3c-9522-27aa6b2948a0 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 811.046318] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dc3f55d2-86a0-4500-96a1-442005028696 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.054577] env[69328]: DEBUG oslo_vmware.api [None req-03b5f61e-b980-4d3c-9522-27aa6b2948a0 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 811.054577] env[69328]: value = "task-3273273" [ 811.054577] env[69328]: _type = "Task" [ 811.054577] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.063897] env[69328]: DEBUG oslo_vmware.api [None req-03b5f61e-b980-4d3c-9522-27aa6b2948a0 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273273, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.073062] env[69328]: DEBUG oslo_vmware.api [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e209ae-9d0f-9a1d-82aa-67b864c05da1, 'name': SearchDatastore_Task, 'duration_secs': 0.041971} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.073343] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 811.073620] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 07b1f872-02bc-471f-97d6-3a781075bee5/07b1f872-02bc-471f-97d6-3a781075bee5.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 811.073896] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fbdd6f30-47b6-4f1d-8185-9b7b7316de09 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.081628] env[69328]: DEBUG oslo_vmware.api [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Waiting for the task: (returnval){ [ 811.081628] env[69328]: value = "task-3273274" [ 811.081628] env[69328]: _type = "Task" [ 811.081628] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.090327] env[69328]: DEBUG oslo_vmware.api [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Task: {'id': task-3273274, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.226674] env[69328]: DEBUG oslo_concurrency.lockutils [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Releasing lock "refresh_cache-ef7effe4-b37f-4fab-ad24-9d8f72a47ee2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 811.227089] env[69328]: DEBUG nova.compute.manager [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Instance network_info: |[{"id": "54919328-7a97-481c-bd6d-056207108b76", "address": "fa:16:3e:dd:51:9d", "network": {"id": "134bd2c0-e2fc-4db8-b47f-3f0cefb9842c", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1713699500-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a0fbd7db3ef4a8bbd6799ef5177f25f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd0eb882-ab95-4373-aa20-ee565a9919e3", "external-id": "nsx-vlan-transportzone-510", "segmentation_id": 510, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54919328-7a", "ovs_interfaceid": "54919328-7a97-481c-bd6d-056207108b76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 811.227922] env[69328]: DEBUG oslo_concurrency.lockutils [req-67b32fd1-8bbf-4350-a801-cc811cb3c4fe req-47046f49-58c4-4180-94e0-a0991d157fcc service nova] Acquired lock "refresh_cache-ef7effe4-b37f-4fab-ad24-9d8f72a47ee2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 811.227922] env[69328]: DEBUG nova.network.neutron [req-67b32fd1-8bbf-4350-a801-cc811cb3c4fe req-47046f49-58c4-4180-94e0-a0991d157fcc service nova] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Refreshing network info cache for port 54919328-7a97-481c-bd6d-056207108b76 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 811.230553] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dd:51:9d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fd0eb882-ab95-4373-aa20-ee565a9919e3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '54919328-7a97-481c-bd6d-056207108b76', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 811.245225] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 
tempest-AttachInterfacesV270Test-432502271-project-member] Creating folder: Project (9a0fbd7db3ef4a8bbd6799ef5177f25f). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 811.251640] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7b968e54-3409-426f-846d-c54b4ace94c7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.267327] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Created folder: Project (9a0fbd7db3ef4a8bbd6799ef5177f25f) in parent group-v653649. [ 811.267725] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Creating folder: Instances. Parent ref: group-v653799. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 811.267725] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-20d3fa5e-1e96-47f5-afc4-c80e989fd20c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.281716] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Created folder: Instances in parent group-v653799. [ 811.282089] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 811.282529] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 811.282750] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-43f43100-aeee-48ea-96c6-befe926a08f4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.306582] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 811.306582] env[69328]: value = "task-3273277" [ 811.306582] env[69328]: _type = "Task" [ 811.306582] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.319576] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273277, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.334228] env[69328]: DEBUG oslo_concurrency.lockutils [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "fd72bae3-cb72-48d0-a0df-9ea3a770a86c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 811.334814] env[69328]: DEBUG oslo_concurrency.lockutils [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "fd72bae3-cb72-48d0-a0df-9ea3a770a86c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 811.362234] env[69328]: INFO nova.compute.manager [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Rebuilding instance [ 811.421487] env[69328]: DEBUG nova.compute.manager [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 811.422536] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad513731-630b-4be9-a75d-ac45faa6547c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.446958] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.633s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 811.447543] env[69328]: DEBUG nova.compute.manager [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 811.456164] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5c10a2ba-3a2a-429c-9b99-15e12f9449ea tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.603s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 811.456164] env[69328]: DEBUG nova.objects.instance [None req-5c10a2ba-3a2a-429c-9b99-15e12f9449ea tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Lazy-loading 'resources' on Instance uuid d724a141-35e7-4483-99aa-8a17066fb63b {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 811.544890] env[69328]: DEBUG nova.network.neutron [req-67b32fd1-8bbf-4350-a801-cc811cb3c4fe req-47046f49-58c4-4180-94e0-a0991d157fcc service nova] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Updated VIF entry in instance network info cache for port 54919328-7a97-481c-bd6d-056207108b76. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 811.545300] env[69328]: DEBUG nova.network.neutron [req-67b32fd1-8bbf-4350-a801-cc811cb3c4fe req-47046f49-58c4-4180-94e0-a0991d157fcc service nova] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Updating instance_info_cache with network_info: [{"id": "54919328-7a97-481c-bd6d-056207108b76", "address": "fa:16:3e:dd:51:9d", "network": {"id": "134bd2c0-e2fc-4db8-b47f-3f0cefb9842c", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1713699500-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a0fbd7db3ef4a8bbd6799ef5177f25f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd0eb882-ab95-4373-aa20-ee565a9919e3", "external-id": "nsx-vlan-transportzone-510", "segmentation_id": 510, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54919328-7a", "ovs_interfaceid": "54919328-7a97-481c-bd6d-056207108b76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.567398] env[69328]: DEBUG oslo_vmware.api [None req-03b5f61e-b980-4d3c-9522-27aa6b2948a0 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273273, 'name': PowerOffVM_Task, 'duration_secs': 0.226777} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.567753] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-03b5f61e-b980-4d3c-9522-27aa6b2948a0 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 811.567874] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-03b5f61e-b980-4d3c-9522-27aa6b2948a0 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 811.568171] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b70097bf-c625-4892-99db-4dc24021dd3e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.594121] env[69328]: DEBUG oslo_vmware.api [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Task: {'id': task-3273274, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.502738} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.594390] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 07b1f872-02bc-471f-97d6-3a781075bee5/07b1f872-02bc-471f-97d6-3a781075bee5.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 811.594599] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 811.594855] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-63e3ff28-0160-4425-b76e-5323bb6b83d2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.602992] env[69328]: DEBUG oslo_vmware.api [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Waiting for the task: (returnval){ [ 811.602992] env[69328]: value = "task-3273279" [ 811.602992] env[69328]: _type = "Task" [ 811.602992] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.612845] env[69328]: DEBUG oslo_vmware.api [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Task: {'id': task-3273279, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.666211] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-03b5f61e-b980-4d3c-9522-27aa6b2948a0 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 811.666211] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-03b5f61e-b980-4d3c-9522-27aa6b2948a0 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 811.666211] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-03b5f61e-b980-4d3c-9522-27aa6b2948a0 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Deleting the datastore file [datastore1] d10bee67-6294-4537-9ce7-4eedb8361ddc {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 811.666211] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6ade6790-379d-4bdc-a954-5cf107414071 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.672376] env[69328]: DEBUG oslo_vmware.api [None req-03b5f61e-b980-4d3c-9522-27aa6b2948a0 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 811.672376] env[69328]: value = "task-3273280" [ 811.672376] env[69328]: _type = "Task" [ 811.672376] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.683175] env[69328]: DEBUG oslo_vmware.api [None req-03b5f61e-b980-4d3c-9522-27aa6b2948a0 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273280, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.820252] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273277, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.961298] env[69328]: DEBUG nova.compute.utils [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 811.963272] env[69328]: DEBUG nova.compute.manager [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Not allocating networking since 'none' was specified. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 812.047974] env[69328]: DEBUG oslo_concurrency.lockutils [req-67b32fd1-8bbf-4350-a801-cc811cb3c4fe req-47046f49-58c4-4180-94e0-a0991d157fcc service nova] Releasing lock "refresh_cache-ef7effe4-b37f-4fab-ad24-9d8f72a47ee2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 812.115570] env[69328]: DEBUG oslo_vmware.api [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Task: {'id': task-3273279, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079632} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.115804] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 812.116616] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-720b2c59-b39c-419b-96d7-0916418cfe42 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.145640] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Reconfiguring VM instance instance-00000034 to attach disk [datastore2] 07b1f872-02bc-471f-97d6-3a781075bee5/07b1f872-02bc-471f-97d6-3a781075bee5.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 812.149616] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b1b0ebc-bcfa-487e-85ac-4492d5a95f82 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.173934] env[69328]: DEBUG oslo_vmware.api [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Waiting for the task: (returnval){ [ 812.173934] env[69328]: value = "task-3273281" [ 812.173934] env[69328]: _type = "Task" [ 812.173934] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.192585] env[69328]: DEBUG oslo_vmware.api [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Task: {'id': task-3273281, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.193125] env[69328]: DEBUG oslo_vmware.api [None req-03b5f61e-b980-4d3c-9522-27aa6b2948a0 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273280, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.322777] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273277, 'name': CreateVM_Task, 'duration_secs': 0.595373} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.323530] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 812.324251] env[69328]: DEBUG oslo_concurrency.lockutils [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 812.324642] env[69328]: DEBUG oslo_concurrency.lockutils [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 812.325019] env[69328]: DEBUG oslo_concurrency.lockutils [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 812.325290] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f80ef06-27c6-4372-a7bb-65ce06152976 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.331509] env[69328]: DEBUG oslo_vmware.api [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Waiting for the task: (returnval){ [ 812.331509] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5229b03c-5485-0cb7-7374-cfa298d3b0c2" [ 812.331509] env[69328]: _type = "Task" [ 812.331509] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.344379] env[69328]: DEBUG oslo_vmware.api [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5229b03c-5485-0cb7-7374-cfa298d3b0c2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.440631] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 812.441833] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-68b847e0-0b03-427a-bc62-aaf63a8074d5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.449988] env[69328]: DEBUG oslo_vmware.api [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 812.449988] env[69328]: value = "task-3273282" [ 812.449988] env[69328]: _type = "Task" [ 812.449988] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.464792] env[69328]: DEBUG nova.compute.manager [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 812.467250] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] VM already powered off {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 812.467491] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 812.468547] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebc517a3-514f-428a-ac4e-d57f8eae8640 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.478058] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 812.482698] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3dac475f-6d57-42f2-9c48-c472e35e7862 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.501025] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ef4b813-8ff6-4d5c-ab41-b752e87b216c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.516893] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4dd1739a-021e-4965-8e6e-3b870c9bfb6d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.550733] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ace07bee-574f-4bcc-86fe-2bec21fc9046 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.561315] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdfb26eb-7093-4075-b8c4-520d7050e2a8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.565796] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 812.566744] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 812.566744] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Deleting the datastore file [datastore2] 55f44102-2891-4b6c-b31e-e8255a24d180 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 812.566905] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-53968bb0-0030-4070-a850-2a931be96b4b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.580856] env[69328]: DEBUG nova.compute.provider_tree [None req-5c10a2ba-3a2a-429c-9b99-15e12f9449ea tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 812.583908] env[69328]: DEBUG oslo_vmware.api [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 812.583908] env[69328]: value = "task-3273284" [ 812.583908] env[69328]: _type = "Task" [ 812.583908] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.592388] env[69328]: DEBUG oslo_vmware.api [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273284, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.690032] env[69328]: DEBUG oslo_vmware.api [None req-03b5f61e-b980-4d3c-9522-27aa6b2948a0 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273280, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.544084} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.692334] env[69328]: DEBUG oslo_vmware.api [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Task: {'id': task-3273281, 'name': ReconfigVM_Task, 'duration_secs': 0.42947} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.692334] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-03b5f61e-b980-4d3c-9522-27aa6b2948a0 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 812.692334] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-03b5f61e-b980-4d3c-9522-27aa6b2948a0 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 812.692334] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-03b5f61e-b980-4d3c-9522-27aa6b2948a0 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 812.692334] env[69328]: INFO nova.compute.manager [None req-03b5f61e-b980-4d3c-9522-27aa6b2948a0 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Took 1.66 seconds to destroy the instance on the hypervisor. [ 812.692574] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-03b5f61e-b980-4d3c-9522-27aa6b2948a0 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 812.692574] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Reconfigured VM instance instance-00000034 to attach disk [datastore2] 07b1f872-02bc-471f-97d6-3a781075bee5/07b1f872-02bc-471f-97d6-3a781075bee5.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 812.692998] env[69328]: DEBUG nova.compute.manager [-] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 812.692998] env[69328]: DEBUG nova.network.neutron [-] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 812.694537] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8d916d93-3b8e-4c3a-a70c-2d66d6d39212 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.704320] env[69328]: DEBUG oslo_vmware.api [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Waiting for the task: (returnval){ [ 812.704320] env[69328]: value = "task-3273285" [ 812.704320] env[69328]: _type = "Task" [ 812.704320] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.712998] env[69328]: DEBUG oslo_vmware.api [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Task: {'id': task-3273285, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.848871] env[69328]: DEBUG oslo_vmware.api [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5229b03c-5485-0cb7-7374-cfa298d3b0c2, 'name': SearchDatastore_Task, 'duration_secs': 0.022064} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.849156] env[69328]: DEBUG oslo_concurrency.lockutils [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 812.849398] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 812.849645] env[69328]: DEBUG oslo_concurrency.lockutils [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 812.849789] env[69328]: DEBUG oslo_concurrency.lockutils [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 812.849963] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 812.850243] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-71722a6d-417b-4114-8864-d47d9685ef62 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.863135] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 812.863135] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 812.863135] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8cffcca7-e71e-4dd8-adbf-c51a6666cc17 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.874513] env[69328]: DEBUG oslo_vmware.api [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Waiting for the task: (returnval){ [ 812.874513] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]523d01b1-cb51-829f-09cd-bf28bc449ba3" [ 812.874513] env[69328]: _type = "Task" [ 812.874513] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.883609] env[69328]: DEBUG oslo_vmware.api [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]523d01b1-cb51-829f-09cd-bf28bc449ba3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.086521] env[69328]: DEBUG nova.scheduler.client.report [None req-5c10a2ba-3a2a-429c-9b99-15e12f9449ea tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 813.100751] env[69328]: DEBUG oslo_vmware.api [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273284, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.2497} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.100944] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 813.101158] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 813.101604] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 813.136966] env[69328]: DEBUG nova.compute.manager [req-d88d943e-23c4-42dc-88c9-f60d98fd7c69 req-87ba7fa6-0001-482c-a11d-67bd1174d8e7 service nova] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Received event network-vif-deleted-7e9163b1-a349-4287-bbfe-8147dc2e52dd {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 813.136966] env[69328]: INFO nova.compute.manager [req-d88d943e-23c4-42dc-88c9-f60d98fd7c69 req-87ba7fa6-0001-482c-a11d-67bd1174d8e7 service nova] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Neutron deleted interface 7e9163b1-a349-4287-bbfe-8147dc2e52dd; detaching it from the instance and deleting it from the info cache [ 813.136966] env[69328]: DEBUG nova.network.neutron [req-d88d943e-23c4-42dc-88c9-f60d98fd7c69 req-87ba7fa6-0001-482c-a11d-67bd1174d8e7 service nova] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.216220] env[69328]: DEBUG oslo_vmware.api [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Task: {'id': task-3273285, 'name': Rename_Task, 'duration_secs': 0.204773} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.216789] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 813.216789] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-94f8e88e-07da-41f7-9c5f-cfff95738da1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.224104] env[69328]: DEBUG oslo_vmware.api [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Waiting for the task: (returnval){ [ 813.224104] env[69328]: value = "task-3273286" [ 813.224104] env[69328]: _type = "Task" [ 813.224104] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.235621] env[69328]: DEBUG oslo_vmware.api [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Task: {'id': task-3273286, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.390303] env[69328]: DEBUG oslo_vmware.api [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]523d01b1-cb51-829f-09cd-bf28bc449ba3, 'name': SearchDatastore_Task, 'duration_secs': 0.013392} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.391159] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9ca2c00-d0de-474c-a5f3-b2208434e4fe {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.398028] env[69328]: DEBUG oslo_vmware.api [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Waiting for the task: (returnval){ [ 813.398028] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]521a8847-d59d-2a6c-96e6-2c8036577748" [ 813.398028] env[69328]: _type = "Task" [ 813.398028] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.406749] env[69328]: DEBUG oslo_vmware.api [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]521a8847-d59d-2a6c-96e6-2c8036577748, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.479917] env[69328]: DEBUG nova.compute.manager [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 813.515446] env[69328]: DEBUG nova.virt.hardware [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 813.515720] env[69328]: DEBUG nova.virt.hardware [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 813.515877] env[69328]: DEBUG nova.virt.hardware [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 813.516349] env[69328]: DEBUG nova.virt.hardware [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 813.516935] env[69328]: DEBUG nova.virt.hardware [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 813.517025] env[69328]: DEBUG nova.virt.hardware [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 813.517474] env[69328]: DEBUG nova.virt.hardware [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 813.518043] env[69328]: DEBUG nova.virt.hardware [None 
req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 813.518560] env[69328]: DEBUG nova.virt.hardware [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 813.518918] env[69328]: DEBUG nova.virt.hardware [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 813.519306] env[69328]: DEBUG nova.virt.hardware [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 813.519732] env[69328]: DEBUG nova.network.neutron [-] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.524091] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d45da13f-6a8f-4ae4-8c57-2ec289a0cd5b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.534076] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d5efc17-7e59-40f3-8eb7-2be3f6dc61cf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.550329] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Instance VIF info [] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 813.556025] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Creating folder: Project (24439df42b644b0dbaba3a632ed2f07c). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 813.556558] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4d15af68-09a0-4941-b482-6e6f555d3892 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.570968] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Created folder: Project (24439df42b644b0dbaba3a632ed2f07c) in parent group-v653649. [ 813.571254] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Creating folder: Instances. Parent ref: group-v653802. 
{{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 813.571534] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0f3347ae-17ed-4b39-848f-51445cba7380 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.583833] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Created folder: Instances in parent group-v653802. [ 813.584084] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 813.584282] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 813.584576] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f46ca38c-8142-449a-ae19-1e38744324ca {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.597457] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5c10a2ba-3a2a-429c-9b99-15e12f9449ea tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.144s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 813.600020] env[69328]: DEBUG oslo_concurrency.lockutils [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.701s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 813.603603] env[69328]: INFO nova.compute.claims [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 813.613375] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 813.613375] env[69328]: value = "task-3273289" [ 813.613375] env[69328]: _type = "Task" [ 813.613375] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.623293] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273289, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.630510] env[69328]: INFO nova.scheduler.client.report [None req-5c10a2ba-3a2a-429c-9b99-15e12f9449ea tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Deleted allocations for instance d724a141-35e7-4483-99aa-8a17066fb63b [ 813.639263] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ba8f8811-1415-411f-bdb7-9c9262cf753e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.656507] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5edaf32e-516d-407b-a0f6-e5f18d6ed971 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.694071] env[69328]: DEBUG nova.compute.manager [req-d88d943e-23c4-42dc-88c9-f60d98fd7c69 req-87ba7fa6-0001-482c-a11d-67bd1174d8e7 service nova] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Detach interface failed, port_id=7e9163b1-a349-4287-bbfe-8147dc2e52dd, reason: Instance d10bee67-6294-4537-9ce7-4eedb8361ddc could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 813.737497] env[69328]: DEBUG oslo_vmware.api [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Task: {'id': task-3273286, 'name': PowerOnVM_Task, 'duration_secs': 0.4698} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.737762] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 813.738109] env[69328]: INFO nova.compute.manager [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Took 7.72 seconds to spawn the instance on the hypervisor. [ 813.738422] env[69328]: DEBUG nova.compute.manager [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 813.739471] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e0131c8-f954-42ee-b050-65dbd11b7596 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.912440] env[69328]: DEBUG oslo_vmware.api [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]521a8847-d59d-2a6c-96e6-2c8036577748, 'name': SearchDatastore_Task, 'duration_secs': 0.012078} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.913167] env[69328]: DEBUG oslo_concurrency.lockutils [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 813.913613] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] ef7effe4-b37f-4fab-ad24-9d8f72a47ee2/ef7effe4-b37f-4fab-ad24-9d8f72a47ee2.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 813.915296] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5088698b-a8af-460f-bf77-fc1d31540eb7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.923334] env[69328]: DEBUG oslo_vmware.api [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Waiting for the task: (returnval){ [ 813.923334] env[69328]: value = "task-3273290" [ 813.923334] env[69328]: _type = "Task" [ 813.923334] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.936870] env[69328]: DEBUG oslo_vmware.api [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Task: {'id': task-3273290, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.030407] env[69328]: INFO nova.compute.manager [-] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Took 1.34 seconds to deallocate network for instance. [ 814.128888] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273289, 'name': CreateVM_Task, 'duration_secs': 0.375278} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.128888] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 814.128888] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.129065] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 814.129519] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 814.129790] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50d0350a-2f7b-47de-82dc-6a8d78ccb7a2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.147618] env[69328]: DEBUG oslo_vmware.api [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Waiting for the task: (returnval){ [ 814.147618] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b572ac-a193-96a5-73e6-4a4550b4d6c5" [ 814.147618] env[69328]: _type = "Task" [ 814.147618] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.147790] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5c10a2ba-3a2a-429c-9b99-15e12f9449ea tempest-ServerPasswordTestJSON-939019426 tempest-ServerPasswordTestJSON-939019426-project-member] Lock "d724a141-35e7-4483-99aa-8a17066fb63b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.797s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 814.154798] env[69328]: DEBUG nova.virt.hardware [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 814.155097] env[69328]: DEBUG nova.virt.hardware [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 814.155343] env[69328]: DEBUG nova.virt.hardware [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 814.155548] env[69328]: DEBUG nova.virt.hardware [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 814.155692] env[69328]: DEBUG nova.virt.hardware [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 814.155833] env[69328]: DEBUG nova.virt.hardware [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 814.156051] env[69328]: DEBUG nova.virt.hardware [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 814.157974] env[69328]: DEBUG nova.virt.hardware [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 814.157974] env[69328]: DEBUG nova.virt.hardware [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 814.157974] env[69328]: DEBUG nova.virt.hardware [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 814.157974] env[69328]: DEBUG nova.virt.hardware [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 814.162439] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c8c36fa-1c2f-4e54-af78-c34014275c75 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.174245] env[69328]: DEBUG oslo_vmware.api [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b572ac-a193-96a5-73e6-4a4550b4d6c5, 'name': SearchDatastore_Task, 'duration_secs': 0.011108} completed successfully. 
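The CPU-topology trace above (flavor and image limits 0:0:0, a maximum of 65536 per dimension, 1 vCPU, and a single resulting topology 1:1:1) boils down to enumerating the sockets/cores/threads splits whose product equals the vCPU count. The snippet below is a minimal, self-contained illustration of that enumeration, not Nova's nova.virt.hardware code; the helper name and the simplified capping rule are assumptions made for the example.

```python
# Minimal illustration (not nova.virt.hardware itself) of why a 1-vCPU flavor
# with no explicit sockets/cores/threads constraints collapses to the single
# topology 1:1:1. Names and the capping rule are assumptions for the example.
from collections import namedtuple
from itertools import product

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_topologies(vcpus, maximum=VirtCPUTopology(65536, 65536, 65536)):
    """Yield every sockets*cores*threads split that multiplies out to vcpus."""
    for s, c, t in product(range(1, min(maximum.sockets, vcpus) + 1),
                           range(1, min(maximum.cores, vcpus) + 1),
                           range(1, min(maximum.threads, vcpus) + 1)):
        if s * c * t == vcpus:
            yield VirtCPUTopology(s, c, t)

print(list(possible_topologies(1)))
# -> [VirtCPUTopology(sockets=1, cores=1, threads=1)]
```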
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.175153] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 814.175416] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 814.175749] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.176616] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 814.176616] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 814.178897] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-49c682cf-79e1-4511-9d03-2188e266b63b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.182678] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fcfe720-6f6a-4659-abb6-e140b7bebb18 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.200191] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cc:fb:3a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1768af3d-3317-4ef5-b484-0c2707d63de7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '87b2e37a-d778-4bd1-a107-6132378b5f4c', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 814.208187] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 814.210856] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 814.211143] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 814.211351] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 814.212085] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ab5c0a3d-b5f5-4e05-853f-ae11e282c26a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.226083] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae8e49d0-1251-42c9-9112-ce5f59c96610 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.233192] env[69328]: DEBUG oslo_vmware.api [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Waiting for the task: (returnval){ [ 814.233192] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]526eeaa8-0c7f-4570-64d7-ce32e6a26be1" [ 814.233192] env[69328]: _type = "Task" [ 814.233192] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.237582] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 814.237582] env[69328]: value = "task-3273291" [ 814.237582] env[69328]: _type = "Task" [ 814.237582] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.246201] env[69328]: DEBUG oslo_vmware.api [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]526eeaa8-0c7f-4570-64d7-ce32e6a26be1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.256787] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273291, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.262547] env[69328]: INFO nova.compute.manager [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Took 38.62 seconds to build instance. 
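The recurring 'Acquiring lock ... by ...', 'Lock ... acquired ... waited N.NNNs' and 'Lock ... "released" ... held N.NNNs' entries above (per-instance UUID locks, the [datastore1] devstack-image-cache_base paths, compute_resources) are emitted by oslo.concurrency's lockutils helpers. A minimal sketch of the two usual forms, with placeholder lock names and bodies rather than Nova code:

```python
# Minimal sketch of the oslo.concurrency pattern behind the
# Acquiring/acquired/"released" lock entries above; lock names and the
# guarded bodies are placeholders, not Nova code.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage():
    # Runs only while the in-process 'compute_resources' lock is held;
    # lockutils logs the acquire/release and how long it waited/held.
    pass

def fetch_cached_image(cache_path):
    # Context-manager form, as used around the per-datastore image-cache
    # paths seen in the log.
    with lockutils.lock(cache_path):
        pass
```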
[ 814.438629] env[69328]: DEBUG oslo_vmware.api [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Task: {'id': task-3273290, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.540837] env[69328]: DEBUG oslo_concurrency.lockutils [None req-03b5f61e-b980-4d3c-9522-27aa6b2948a0 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 814.574376] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Acquiring lock "55d9ba65-e5c8-446a-a209-a840f30ff02c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 814.574376] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Lock "55d9ba65-e5c8-446a-a209-a840f30ff02c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 814.749104] env[69328]: DEBUG oslo_vmware.api [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]526eeaa8-0c7f-4570-64d7-ce32e6a26be1, 'name': SearchDatastore_Task, 'duration_secs': 0.011939} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.749665] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad6b560a-b161-4bd8-9978-fbe82866d3a7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.755325] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273291, 'name': CreateVM_Task, 'duration_secs': 0.395193} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.758278] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 814.759609] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.759992] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 814.760428] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 814.761873] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc28d62e-8deb-411b-8d76-6fda38f19c88 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.763662] env[69328]: DEBUG oslo_vmware.api [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Waiting for the task: (returnval){ [ 814.763662] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ad40cf-0e91-15c4-ada0-ede9b2635fb7" [ 814.763662] env[69328]: _type = "Task" [ 814.763662] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.768578] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3210677d-0d35-4336-8f53-1fd71df694a4 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Lock "07b1f872-02bc-471f-97d6-3a781075bee5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.222s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 814.769262] env[69328]: DEBUG oslo_vmware.api [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 814.769262] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52890a9a-5ef5-a6a8-321b-583060c68e77" [ 814.769262] env[69328]: _type = "Task" [ 814.769262] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.780290] env[69328]: DEBUG oslo_vmware.api [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ad40cf-0e91-15c4-ada0-ede9b2635fb7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.786326] env[69328]: DEBUG oslo_vmware.api [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52890a9a-5ef5-a6a8-321b-583060c68e77, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.941281] env[69328]: DEBUG oslo_vmware.api [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Task: {'id': task-3273290, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.713997} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.941622] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] ef7effe4-b37f-4fab-ad24-9d8f72a47ee2/ef7effe4-b37f-4fab-ad24-9d8f72a47ee2.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 814.941876] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 814.942185] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-400a61cd-1b39-4dc6-8afc-8b68e1a6da82 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.952710] env[69328]: DEBUG oslo_vmware.api [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Waiting for the task: (returnval){ [ 814.952710] env[69328]: value = "task-3273292" [ 814.952710] env[69328]: _type = "Task" [ 814.952710] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.964834] env[69328]: DEBUG oslo_vmware.api [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Task: {'id': task-3273292, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.060687] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f02a1f41-4538-471f-b31a-ed84c96c89dd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.069537] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef64634b-3505-4f90-bae2-d6b4b009a86b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.101627] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-525cc246-a872-4552-9a22-838e06842bf5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.111557] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9fc774c-3c11-470f-a3a7-4052cbf7dd6c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.126675] env[69328]: DEBUG nova.compute.provider_tree [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 815.274365] env[69328]: DEBUG nova.compute.manager [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 815.276832] env[69328]: DEBUG oslo_vmware.api [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ad40cf-0e91-15c4-ada0-ede9b2635fb7, 'name': SearchDatastore_Task, 'duration_secs': 0.025217} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.280533] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 815.280533] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b/b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 815.280533] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3b6cfa3d-54f8-4b4d-a5e1-ca34346daafe {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.288954] env[69328]: DEBUG oslo_vmware.api [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52890a9a-5ef5-a6a8-321b-583060c68e77, 'name': SearchDatastore_Task, 'duration_secs': 0.048054} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.290291] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 815.290525] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 815.290762] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 815.291511] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 815.291511] env[69328]: DEBUG nova.virt.vmwareapi.ds_util 
[None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 815.291664] env[69328]: DEBUG oslo_vmware.api [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Waiting for the task: (returnval){ [ 815.291664] env[69328]: value = "task-3273293" [ 815.291664] env[69328]: _type = "Task" [ 815.291664] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.293048] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b192ad1d-b821-4aa2-bccf-069e63d73f92 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.303670] env[69328]: DEBUG oslo_vmware.api [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Task: {'id': task-3273293, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.308194] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 815.308380] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 815.309514] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa3bee9d-e85a-4878-a43b-a8d4c302bef2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.315662] env[69328]: DEBUG oslo_vmware.api [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 815.315662] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5287b657-0401-5db9-6769-76c90f456709" [ 815.315662] env[69328]: _type = "Task" [ 815.315662] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.326600] env[69328]: DEBUG oslo_vmware.api [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5287b657-0401-5db9-6769-76c90f456709, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.464889] env[69328]: DEBUG oslo_vmware.api [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Task: {'id': task-3273292, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071921} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.465233] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 815.466060] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d61149d3-35ed-44ac-8bc5-77b8cc926243 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.492039] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Reconfiguring VM instance instance-00000035 to attach disk [datastore2] ef7effe4-b37f-4fab-ad24-9d8f72a47ee2/ef7effe4-b37f-4fab-ad24-9d8f72a47ee2.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 815.492998] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2ab669e7-593b-4ab0-87e6-a5ddbc0c39c4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.515758] env[69328]: DEBUG oslo_vmware.api [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Waiting for the task: (returnval){ [ 815.515758] env[69328]: value = "task-3273294" [ 815.515758] env[69328]: _type = "Task" [ 815.515758] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.523641] env[69328]: DEBUG oslo_vmware.api [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Task: {'id': task-3273294, 'name': ReconfigVM_Task} progress is 5%. 
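The 'Extending root virtual disk to 1048576' / 'Extended root virtual disk' pair above corresponds to growing the copied VMDK to the flavor's 1 GiB root disk (1048576 KiB) through vCenter's VirtualDiskManager. Below is a hedged sketch of how such an asynchronous call is typically issued through an oslo.vmware session and then waited on; the function name, disk path and datacenter reference are placeholders, not values taken from this log.

```python
# Sketch (not Nova's vm_util) of the asynchronous disk-extend operation seen
# above: issue ExtendVirtualDisk_Task on the VirtualDiskManager and block on
# the returned task. disk_path, dc_ref and the size are placeholders.
def extend_root_disk(session, disk_path, dc_ref, new_capacity_kb):
    """Grow a VMDK (e.g. to 1048576 KiB) and wait for vCenter to finish."""
    vdm = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task', vdm,
                              name=disk_path,      # '[datastore1] <uuid>/<uuid>.vmdk'
                              datacenter=dc_ref,
                              newCapacityKb=new_capacity_kb,
                              eagerZero=False)
    # Blocks until the task succeeds; raises if vCenter reports an error.
    session.wait_for_task(task)
```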
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.630709] env[69328]: DEBUG nova.scheduler.client.report [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 815.805435] env[69328]: DEBUG oslo_vmware.api [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Task: {'id': task-3273293, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.829205] env[69328]: DEBUG oslo_vmware.api [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5287b657-0401-5db9-6769-76c90f456709, 'name': SearchDatastore_Task, 'duration_secs': 0.023398} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.830203] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-477b6bba-7c6c-413d-a93a-56607adedf73 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.838609] env[69328]: DEBUG oslo_vmware.api [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 815.838609] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a0424c-e6d8-64a0-3047-9e3ee4f3d8ee" [ 815.838609] env[69328]: _type = "Task" [ 815.838609] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.848956] env[69328]: DEBUG oslo_vmware.api [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a0424c-e6d8-64a0-3047-9e3ee4f3d8ee, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.850132] env[69328]: DEBUG oslo_concurrency.lockutils [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 815.860856] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a1d308d-b267-4d92-84b1-af0eebcb5224 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Acquiring lock "07b1f872-02bc-471f-97d6-3a781075bee5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 815.861203] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a1d308d-b267-4d92-84b1-af0eebcb5224 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Lock "07b1f872-02bc-471f-97d6-3a781075bee5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 815.861454] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a1d308d-b267-4d92-84b1-af0eebcb5224 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Acquiring lock "07b1f872-02bc-471f-97d6-3a781075bee5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 815.861650] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a1d308d-b267-4d92-84b1-af0eebcb5224 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Lock "07b1f872-02bc-471f-97d6-3a781075bee5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 815.861816] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a1d308d-b267-4d92-84b1-af0eebcb5224 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Lock "07b1f872-02bc-471f-97d6-3a781075bee5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 815.864611] env[69328]: INFO nova.compute.manager [None req-4a1d308d-b267-4d92-84b1-af0eebcb5224 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Terminating instance [ 816.007415] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 816.007722] env[69328]: DEBUG oslo_service.periodic_task 
[None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 816.026679] env[69328]: DEBUG oslo_vmware.api [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Task: {'id': task-3273294, 'name': ReconfigVM_Task, 'duration_secs': 0.334299} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.026991] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Reconfigured VM instance instance-00000035 to attach disk [datastore2] ef7effe4-b37f-4fab-ad24-9d8f72a47ee2/ef7effe4-b37f-4fab-ad24-9d8f72a47ee2.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 816.027687] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fc4f52bc-5cc6-41b0-9ee4-157841e4c4d2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.035614] env[69328]: DEBUG oslo_vmware.api [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Waiting for the task: (returnval){ [ 816.035614] env[69328]: value = "task-3273295" [ 816.035614] env[69328]: _type = "Task" [ 816.035614] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.045326] env[69328]: DEBUG oslo_vmware.api [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Task: {'id': task-3273295, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.135559] env[69328]: DEBUG oslo_concurrency.lockutils [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.536s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 816.136118] env[69328]: DEBUG nova.compute.manager [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 816.139024] env[69328]: DEBUG oslo_concurrency.lockutils [None req-da255cd1-6657-45d4-a96b-d26ccab796c8 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.303s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 816.139247] env[69328]: DEBUG nova.objects.instance [None req-da255cd1-6657-45d4-a96b-d26ccab796c8 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Lazy-loading 'resources' on Instance uuid f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 816.305603] env[69328]: DEBUG oslo_vmware.api [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Task: {'id': task-3273293, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.795441} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.305884] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b/b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 816.306109] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 816.306373] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8389ad70-5b32-4ef9-9e75-267df4ed6ce7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.315571] env[69328]: DEBUG oslo_vmware.api [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Waiting for the task: (returnval){ [ 816.315571] env[69328]: value = "task-3273296" [ 816.315571] env[69328]: _type = "Task" [ 816.315571] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.324221] env[69328]: DEBUG oslo_vmware.api [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Task: {'id': task-3273296, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.352061] env[69328]: DEBUG oslo_vmware.api [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a0424c-e6d8-64a0-3047-9e3ee4f3d8ee, 'name': SearchDatastore_Task, 'duration_secs': 0.055841} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.352253] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 816.352558] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 55f44102-2891-4b6c-b31e-e8255a24d180/55f44102-2891-4b6c-b31e-e8255a24d180.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 816.352852] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6aad8686-1fb7-4d91-bbeb-355f715e43ce {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.360214] env[69328]: DEBUG oslo_vmware.api [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 816.360214] env[69328]: value = "task-3273297" [ 816.360214] env[69328]: _type = "Task" [ 816.360214] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.368964] env[69328]: DEBUG oslo_vmware.api [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273297, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.371853] env[69328]: DEBUG nova.compute.manager [None req-4a1d308d-b267-4d92-84b1-af0eebcb5224 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 816.372049] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4a1d308d-b267-4d92-84b1-af0eebcb5224 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 816.372880] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dc31a00-a125-47f2-a650-a95bacb0d3d8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.380809] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a1d308d-b267-4d92-84b1-af0eebcb5224 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 816.381123] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e400ba37-abfa-40c6-8d70-c4b0d4cf558c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.388648] env[69328]: DEBUG oslo_vmware.api [None req-4a1d308d-b267-4d92-84b1-af0eebcb5224 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Waiting for the task: (returnval){ [ 816.388648] env[69328]: value = "task-3273298" [ 816.388648] env[69328]: _type = "Task" [ 816.388648] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.401776] env[69328]: DEBUG oslo_vmware.api [None req-4a1d308d-b267-4d92-84b1-af0eebcb5224 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Task: {'id': task-3273298, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.519259] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 816.519259] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 816.519259] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 816.519259] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 816.519259] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 816.519259] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 816.519915] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69328) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 816.519915] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager.update_available_resource {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 816.547793] env[69328]: DEBUG oslo_vmware.api [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Task: {'id': task-3273295, 'name': Rename_Task, 'duration_secs': 0.142798} completed successfully. 
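Once the Rename_Task above completes, the driver powers the instance on and blocks until vCenter reports the task finished, which is what produces the PowerOnVM_Task 'progress is N%' polling entries that follow. A sketch of that pattern via oslo.vmware; the host, credentials and poll interval are placeholders, not values from this log.

```python
# Sketch of the power-on step that follows the Rename_Task above: the
# PowerOnVM_Task call is asynchronous, and wait_for_task() is the polling
# loop behind the "progress is N%" / "completed successfully" entries.
from oslo_vmware import api

def power_on_and_wait(session, vm_ref):
    """Start a VM and block until the vCenter task completes (or raises)."""
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    return session.wait_for_task(task_ref)

def make_session():
    # Placeholder endpoint and credentials for the example only.
    return api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                api_retry_count=10, task_poll_interval=0.5)
```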
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.548155] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 816.548501] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fef83845-9289-43c2-8cec-bb20bcdaab33 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.562292] env[69328]: DEBUG oslo_vmware.api [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Waiting for the task: (returnval){ [ 816.562292] env[69328]: value = "task-3273299" [ 816.562292] env[69328]: _type = "Task" [ 816.562292] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.571380] env[69328]: DEBUG oslo_vmware.api [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Task: {'id': task-3273299, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.642755] env[69328]: DEBUG nova.compute.utils [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 816.647604] env[69328]: DEBUG nova.compute.manager [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 816.648438] env[69328]: DEBUG nova.network.neutron [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 816.709643] env[69328]: DEBUG nova.policy [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '19265c910cd04814978013416bf2a18a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '636412f89c9d488a9cfd6f19ef046efc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 816.828370] env[69328]: DEBUG oslo_vmware.api [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Task: {'id': task-3273296, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069078} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.829545] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 816.830533] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-482206e1-a41d-4f73-abb2-d93b98b7e692 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.858623] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b/b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 816.861661] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8a20ad5c-47e3-446a-bcdb-d356a736d334 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.888510] env[69328]: DEBUG oslo_vmware.api [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273297, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.890456] env[69328]: DEBUG oslo_vmware.api [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Waiting for the task: (returnval){ [ 816.890456] env[69328]: value = "task-3273300" [ 816.890456] env[69328]: _type = "Task" [ 816.890456] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.908606] env[69328]: DEBUG oslo_vmware.api [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Task: {'id': task-3273300, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.912451] env[69328]: DEBUG oslo_vmware.api [None req-4a1d308d-b267-4d92-84b1-af0eebcb5224 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Task: {'id': task-3273298, 'name': PowerOffVM_Task, 'duration_secs': 0.195089} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.912662] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a1d308d-b267-4d92-84b1-af0eebcb5224 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 816.912662] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4a1d308d-b267-4d92-84b1-af0eebcb5224 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 816.912994] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-79486539-8317-4ec7-bb32-5724e99f434f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.992905] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4a1d308d-b267-4d92-84b1-af0eebcb5224 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 816.993150] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4a1d308d-b267-4d92-84b1-af0eebcb5224 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 816.993337] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a1d308d-b267-4d92-84b1-af0eebcb5224 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Deleting the datastore file [datastore2] 07b1f872-02bc-471f-97d6-3a781075bee5 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 816.994059] env[69328]: DEBUG oslo_vmware.service [-] 
Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b12ed3e9-61c3-4042-91f3-59db03d49a44 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.003022] env[69328]: DEBUG oslo_vmware.api [None req-4a1d308d-b267-4d92-84b1-af0eebcb5224 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Waiting for the task: (returnval){ [ 817.003022] env[69328]: value = "task-3273302" [ 817.003022] env[69328]: _type = "Task" [ 817.003022] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.014456] env[69328]: DEBUG oslo_vmware.api [None req-4a1d308d-b267-4d92-84b1-af0eebcb5224 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Task: {'id': task-3273302, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.021563] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 817.079077] env[69328]: DEBUG oslo_vmware.api [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Task: {'id': task-3273299, 'name': PowerOnVM_Task} progress is 90%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.116901] env[69328]: DEBUG nova.network.neutron [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Successfully created port: ad43b1f6-e3ce-4362-856f-82909e1eb51c {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 817.148183] env[69328]: DEBUG nova.compute.manager [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Start building block device mappings for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 817.174780] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b912f579-fec4-494d-8db7-5c81aaa6d427 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.184043] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-735e8bbd-f2c4-42fb-91c0-4950fdf4bb17 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.218262] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bcf01ca-efc0-4c4b-8307-c71b2d287f21 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.227345] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e527a46c-902f-48d9-a21b-3be04ebc2bb7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.242162] env[69328]: DEBUG nova.compute.provider_tree [None req-da255cd1-6657-45d4-a96b-d26ccab796c8 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 817.389308] env[69328]: DEBUG oslo_vmware.api [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273297, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.63702} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.389701] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 55f44102-2891-4b6c-b31e-e8255a24d180/55f44102-2891-4b6c-b31e-e8255a24d180.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 817.390016] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 817.390349] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3ee0c47f-1795-4ca1-9a61-284008f4ebd2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.400320] env[69328]: DEBUG oslo_vmware.api [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 817.400320] env[69328]: value = "task-3273303" [ 817.400320] env[69328]: _type = "Task" [ 817.400320] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.409683] env[69328]: DEBUG oslo_vmware.api [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Task: {'id': task-3273300, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.417374] env[69328]: DEBUG oslo_vmware.api [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273303, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.514229] env[69328]: DEBUG oslo_vmware.api [None req-4a1d308d-b267-4d92-84b1-af0eebcb5224 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Task: {'id': task-3273302, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149807} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.514667] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a1d308d-b267-4d92-84b1-af0eebcb5224 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 817.514970] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4a1d308d-b267-4d92-84b1-af0eebcb5224 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 817.515233] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4a1d308d-b267-4d92-84b1-af0eebcb5224 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 817.515492] env[69328]: INFO nova.compute.manager [None req-4a1d308d-b267-4d92-84b1-af0eebcb5224 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Took 1.14 seconds to destroy the instance on the hypervisor. [ 817.515871] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4a1d308d-b267-4d92-84b1-af0eebcb5224 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 817.516150] env[69328]: DEBUG nova.compute.manager [-] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 817.516302] env[69328]: DEBUG nova.network.neutron [-] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 817.574237] env[69328]: DEBUG oslo_vmware.api [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Task: {'id': task-3273299, 'name': PowerOnVM_Task, 'duration_secs': 0.779284} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.574582] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 817.574863] env[69328]: INFO nova.compute.manager [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Took 8.92 seconds to spawn the instance on the hypervisor. [ 817.575113] env[69328]: DEBUG nova.compute.manager [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 817.576170] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ac2096b-4e79-4eff-99cc-102947934114 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.749163] env[69328]: DEBUG nova.scheduler.client.report [None req-da255cd1-6657-45d4-a96b-d26ccab796c8 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 817.916024] env[69328]: DEBUG oslo_vmware.api [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Task: {'id': task-3273300, 'name': ReconfigVM_Task, 'duration_secs': 0.635995} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.917986] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Reconfigured VM instance instance-00000036 to attach disk [datastore1] b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b/b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 817.918793] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-10a3e180-02e5-490a-9f63-69ded3a815f2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.923871] env[69328]: DEBUG oslo_vmware.api [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273303, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081758} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.924693] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 817.925520] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d99518b4-edc2-4c5d-987a-39b29c506a04 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.930237] env[69328]: DEBUG oslo_vmware.api [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Waiting for the task: (returnval){ [ 817.930237] env[69328]: value = "task-3273304" [ 817.930237] env[69328]: _type = "Task" [ 817.930237] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.960065] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] 55f44102-2891-4b6c-b31e-e8255a24d180/55f44102-2891-4b6c-b31e-e8255a24d180.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 817.960914] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-896065d5-aee5-4f02-9333-f2fe9717a15f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.981749] env[69328]: DEBUG oslo_vmware.api [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Task: {'id': task-3273304, 'name': Rename_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.989388] env[69328]: DEBUG oslo_vmware.api [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 817.989388] env[69328]: value = "task-3273305" [ 817.989388] env[69328]: _type = "Task" [ 817.989388] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.000235] env[69328]: DEBUG oslo_vmware.api [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273305, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.019133] env[69328]: DEBUG nova.compute.manager [req-f182f376-639a-4bc5-909a-dff73b42c151 req-b9f40bdd-c0ce-433d-abb0-d0db1d5ce075 service nova] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Received event network-vif-deleted-e638f147-afff-45b0-bf3e-f63debcc2d53 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 818.019501] env[69328]: INFO nova.compute.manager [req-f182f376-639a-4bc5-909a-dff73b42c151 req-b9f40bdd-c0ce-433d-abb0-d0db1d5ce075 service nova] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Neutron deleted interface e638f147-afff-45b0-bf3e-f63debcc2d53; detaching it from the instance and deleting it from the info cache [ 818.019550] env[69328]: DEBUG nova.network.neutron [req-f182f376-639a-4bc5-909a-dff73b42c151 req-b9f40bdd-c0ce-433d-abb0-d0db1d5ce075 service nova] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.098744] env[69328]: INFO nova.compute.manager [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Took 39.29 seconds to build instance. [ 818.160811] env[69328]: DEBUG nova.compute.manager [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 818.199138] env[69328]: DEBUG nova.virt.hardware [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 818.199427] env[69328]: DEBUG nova.virt.hardware [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 818.199593] env[69328]: DEBUG nova.virt.hardware [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 818.199802] env[69328]: DEBUG nova.virt.hardware [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 818.199948] env[69328]: DEBUG nova.virt.hardware [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 818.200079] env[69328]: DEBUG nova.virt.hardware [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 818.200333] env[69328]: DEBUG nova.virt.hardware [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 818.200512] env[69328]: DEBUG nova.virt.hardware [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 818.200684] env[69328]: DEBUG nova.virt.hardware [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] 
Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 818.200849] env[69328]: DEBUG nova.virt.hardware [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 818.201032] env[69328]: DEBUG nova.virt.hardware [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 818.202093] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3d46a53-9a15-4f92-80e5-16950e3c2fbe {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.212799] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58d88d2d-d760-4769-9487-8313d94fd8ea {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.255798] env[69328]: DEBUG oslo_concurrency.lockutils [None req-da255cd1-6657-45d4-a96b-d26ccab796c8 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.117s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 818.258227] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.476s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 818.260194] env[69328]: INFO nova.compute.claims [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 818.287859] env[69328]: INFO nova.scheduler.client.report [None req-da255cd1-6657-45d4-a96b-d26ccab796c8 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Deleted allocations for instance f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe [ 818.441286] env[69328]: DEBUG oslo_vmware.api [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Task: {'id': task-3273304, 'name': Rename_Task, 'duration_secs': 0.162392} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.441592] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 818.441843] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5a351a7f-3dca-4843-afbf-dbaf1efbab9a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.449491] env[69328]: DEBUG oslo_vmware.api [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Waiting for the task: (returnval){ [ 818.449491] env[69328]: value = "task-3273306" [ 818.449491] env[69328]: _type = "Task" [ 818.449491] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.457906] env[69328]: DEBUG oslo_vmware.api [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Task: {'id': task-3273306, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.485068] env[69328]: DEBUG nova.network.neutron [-] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.502695] env[69328]: DEBUG oslo_vmware.api [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273305, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.524034] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cd921aea-1d8d-4a91-af1d-97cdb2f9091a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.532853] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c05af1f-93be-4184-88df-6d34fb4c7108 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.573889] env[69328]: DEBUG nova.compute.manager [req-f182f376-639a-4bc5-909a-dff73b42c151 req-b9f40bdd-c0ce-433d-abb0-d0db1d5ce075 service nova] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Detach interface failed, port_id=e638f147-afff-45b0-bf3e-f63debcc2d53, reason: Instance 07b1f872-02bc-471f-97d6-3a781075bee5 could not be found. 
{{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 818.605704] env[69328]: DEBUG oslo_concurrency.lockutils [None req-04090dd8-8bbb-4b66-8c84-61207736831f tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Lock "ef7effe4-b37f-4fab-ad24-9d8f72a47ee2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 80.408s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 818.734152] env[69328]: DEBUG nova.compute.manager [req-c0d70138-9870-4cff-90fc-2a628c0764c3 req-390d7e80-ce52-42e3-a1cd-cadc1c7fbe31 service nova] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Received event network-vif-plugged-ad43b1f6-e3ce-4362-856f-82909e1eb51c {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 818.734435] env[69328]: DEBUG oslo_concurrency.lockutils [req-c0d70138-9870-4cff-90fc-2a628c0764c3 req-390d7e80-ce52-42e3-a1cd-cadc1c7fbe31 service nova] Acquiring lock "62fa6807-f67d-4bf5-ba23-9e97f9da120e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 818.734663] env[69328]: DEBUG oslo_concurrency.lockutils [req-c0d70138-9870-4cff-90fc-2a628c0764c3 req-390d7e80-ce52-42e3-a1cd-cadc1c7fbe31 service nova] Lock "62fa6807-f67d-4bf5-ba23-9e97f9da120e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 818.734839] env[69328]: DEBUG oslo_concurrency.lockutils [req-c0d70138-9870-4cff-90fc-2a628c0764c3 req-390d7e80-ce52-42e3-a1cd-cadc1c7fbe31 service nova] Lock "62fa6807-f67d-4bf5-ba23-9e97f9da120e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 818.735025] env[69328]: DEBUG nova.compute.manager [req-c0d70138-9870-4cff-90fc-2a628c0764c3 req-390d7e80-ce52-42e3-a1cd-cadc1c7fbe31 service nova] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] No waiting events found dispatching network-vif-plugged-ad43b1f6-e3ce-4362-856f-82909e1eb51c {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 818.735224] env[69328]: WARNING nova.compute.manager [req-c0d70138-9870-4cff-90fc-2a628c0764c3 req-390d7e80-ce52-42e3-a1cd-cadc1c7fbe31 service nova] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Received unexpected event network-vif-plugged-ad43b1f6-e3ce-4362-856f-82909e1eb51c for instance with vm_state building and task_state spawning. 
[ 818.799193] env[69328]: DEBUG oslo_concurrency.lockutils [None req-da255cd1-6657-45d4-a96b-d26ccab796c8 tempest-ServersAaction247Test-1873867370 tempest-ServersAaction247Test-1873867370-project-member] Lock "f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.240s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 818.960604] env[69328]: DEBUG oslo_vmware.api [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Task: {'id': task-3273306, 'name': PowerOnVM_Task, 'duration_secs': 0.442662} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.961965] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 818.962267] env[69328]: INFO nova.compute.manager [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Took 5.48 seconds to spawn the instance on the hypervisor. [ 818.962465] env[69328]: DEBUG nova.compute.manager [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 818.963260] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc6ed0d0-2f58-4447-8e9b-8bc6e6fd912b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.987856] env[69328]: INFO nova.compute.manager [-] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Took 1.47 seconds to deallocate network for instance. [ 819.006531] env[69328]: DEBUG oslo_vmware.api [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273305, 'name': ReconfigVM_Task, 'duration_secs': 0.816238} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.006804] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Reconfigured VM instance instance-00000033 to attach disk [datastore1] 55f44102-2891-4b6c-b31e-e8255a24d180/55f44102-2891-4b6c-b31e-e8255a24d180.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 819.007875] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0268c8b8-2eb7-4c4a-b2e1-959b19ecd647 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.016797] env[69328]: DEBUG oslo_vmware.api [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 819.016797] env[69328]: value = "task-3273307" [ 819.016797] env[69328]: _type = "Task" [ 819.016797] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.026917] env[69328]: DEBUG oslo_vmware.api [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273307, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.095214] env[69328]: DEBUG nova.network.neutron [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Successfully updated port: ad43b1f6-e3ce-4362-856f-82909e1eb51c {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 819.108198] env[69328]: DEBUG nova.compute.manager [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Starting instance... 
{{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 819.117366] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a6d11c68-7500-4e0e-a427-50944fb22977 tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Acquiring lock "interface-ef7effe4-b37f-4fab-ad24-9d8f72a47ee2-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 819.117528] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a6d11c68-7500-4e0e-a427-50944fb22977 tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Lock "interface-ef7effe4-b37f-4fab-ad24-9d8f72a47ee2-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 819.117849] env[69328]: DEBUG nova.objects.instance [None req-a6d11c68-7500-4e0e-a427-50944fb22977 tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Lazy-loading 'flavor' on Instance uuid ef7effe4-b37f-4fab-ad24-9d8f72a47ee2 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 819.492277] env[69328]: INFO nova.compute.manager [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Took 38.03 seconds to build instance. [ 819.501597] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a1d308d-b267-4d92-84b1-af0eebcb5224 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 819.528332] env[69328]: DEBUG oslo_vmware.api [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273307, 'name': Rename_Task, 'duration_secs': 0.391779} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.528435] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 819.533272] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5a552c10-1ec3-4c5a-a9b0-f27b2071ea32 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.541841] env[69328]: DEBUG oslo_vmware.api [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 819.541841] env[69328]: value = "task-3273308" [ 819.541841] env[69328]: _type = "Task" [ 819.541841] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.557898] env[69328]: DEBUG oslo_vmware.api [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273308, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.603239] env[69328]: DEBUG oslo_concurrency.lockutils [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "refresh_cache-62fa6807-f67d-4bf5-ba23-9e97f9da120e" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.603459] env[69328]: DEBUG oslo_concurrency.lockutils [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquired lock "refresh_cache-62fa6807-f67d-4bf5-ba23-9e97f9da120e" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 819.603667] env[69328]: DEBUG nova.network.neutron [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 819.622233] env[69328]: DEBUG nova.objects.instance [None req-a6d11c68-7500-4e0e-a427-50944fb22977 tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Lazy-loading 'pci_requests' on Instance uuid ef7effe4-b37f-4fab-ad24-9d8f72a47ee2 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 819.653407] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 819.751026] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb2c5eae-4839-46c1-88d0-85e66cfb941e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.757431] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed6cd8a4-8cbb-4753-b6eb-8571eb6a3fac {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.794581] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45595fa2-1da4-4971-9da4-c71c4846cca6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.804225] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14f7e376-bb48-46cb-ae30-27310c09d850 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.821260] env[69328]: DEBUG nova.compute.provider_tree [None req-ef65691d-9471-4b07-af1f-3085d66114d7 
tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 819.998659] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e8a6bffa-7282-4f00-9d2a-d95b5b5a57cf tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Lock "b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.375s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 820.053247] env[69328]: DEBUG oslo_vmware.api [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273308, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.127855] env[69328]: DEBUG nova.objects.base [None req-a6d11c68-7500-4e0e-a427-50944fb22977 tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=69328) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 820.128104] env[69328]: DEBUG nova.network.neutron [None req-a6d11c68-7500-4e0e-a427-50944fb22977 tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 820.185141] env[69328]: DEBUG nova.network.neutron [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 820.211927] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a6d11c68-7500-4e0e-a427-50944fb22977 tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Lock "interface-ef7effe4-b37f-4fab-ad24-9d8f72a47ee2-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.094s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 820.324953] env[69328]: DEBUG nova.scheduler.client.report [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 820.458788] env[69328]: DEBUG nova.network.neutron [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Updating instance_info_cache with network_info: [{"id": "ad43b1f6-e3ce-4362-856f-82909e1eb51c", "address": "fa:16:3e:73:37:45", "network": {"id": "5b6ce910-5dc1-49bb-ad4b-fe1a86509fc3", "bridge": "br-int", "label": "tempest-ImagesTestJSON-224847435-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "636412f89c9d488a9cfd6f19ef046efc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1e1e320-ec56-4fcc-b6e9-30aa210d3b36", "external-id": "nsx-vlan-transportzone-447", "segmentation_id": 447, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad43b1f6-e3", "ovs_interfaceid": "ad43b1f6-e3ce-4362-856f-82909e1eb51c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.553966] env[69328]: DEBUG oslo_vmware.api [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273308, 'name': PowerOnVM_Task, 'duration_secs': 0.656047} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.554290] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 820.554732] env[69328]: DEBUG nova.compute.manager [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 820.555305] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c5eaea4-6891-41e0-98cc-95ca56a24929 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.830536] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.572s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 820.831056] env[69328]: DEBUG nova.compute.manager [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 820.838810] env[69328]: DEBUG oslo_concurrency.lockutils [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.727s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 820.840982] env[69328]: INFO nova.compute.claims [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 820.962182] env[69328]: DEBUG oslo_concurrency.lockutils [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Releasing lock "refresh_cache-62fa6807-f67d-4bf5-ba23-9e97f9da120e" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 820.962571] env[69328]: DEBUG nova.compute.manager [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Instance network_info: |[{"id": "ad43b1f6-e3ce-4362-856f-82909e1eb51c", "address": "fa:16:3e:73:37:45", "network": {"id": "5b6ce910-5dc1-49bb-ad4b-fe1a86509fc3", "bridge": "br-int", "label": "tempest-ImagesTestJSON-224847435-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "636412f89c9d488a9cfd6f19ef046efc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1e1e320-ec56-4fcc-b6e9-30aa210d3b36", "external-id": "nsx-vlan-transportzone-447", "segmentation_id": 447, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad43b1f6-e3", "ovs_interfaceid": "ad43b1f6-e3ce-4362-856f-82909e1eb51c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 820.962993] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:73:37:45', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd1e1e320-ec56-4fcc-b6e9-30aa210d3b36', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ad43b1f6-e3ce-4362-856f-82909e1eb51c', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 820.970756] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Creating folder: Project 
(636412f89c9d488a9cfd6f19ef046efc). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 820.971051] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ccc46aa6-aebd-49cd-bef8-44ff30b08cec {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.985186] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Created folder: Project (636412f89c9d488a9cfd6f19ef046efc) in parent group-v653649. [ 820.985186] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Creating folder: Instances. Parent ref: group-v653806. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 820.985186] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c9b2131f-5c0b-44b4-a53d-15d152ef9e8a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.995107] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Created folder: Instances in parent group-v653806. [ 820.995363] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 820.995565] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 820.995768] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-284a6eba-4dbb-4d7a-b989-8745fa76a6e1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.017028] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 821.017028] env[69328]: value = "task-3273311" [ 821.017028] env[69328]: _type = "Task" [ 821.017028] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.025872] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273311, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.066512] env[69328]: INFO nova.compute.manager [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] bringing vm to original state: 'stopped' [ 821.347022] env[69328]: DEBUG nova.compute.utils [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 821.348768] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "a0b663eb-31b0-4de1-94bc-660a7d9c1c7b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 821.349191] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "a0b663eb-31b0-4de1-94bc-660a7d9c1c7b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 821.353219] env[69328]: DEBUG nova.compute.manager [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 821.353896] env[69328]: DEBUG nova.network.neutron [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 821.405024] env[69328]: DEBUG nova.policy [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7fbdc3e734be4369884cfcf483b2678f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4e8bc0d144f44546bd21fb04277c998c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 821.530344] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273311, 'name': CreateVM_Task, 'duration_secs': 0.397834} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.530344] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 821.531228] env[69328]: DEBUG oslo_concurrency.lockutils [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.531441] env[69328]: DEBUG oslo_concurrency.lockutils [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 821.531776] env[69328]: DEBUG oslo_concurrency.lockutils [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 821.533283] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4902792f-8754-4550-b730-1d55a8f9480e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.538140] env[69328]: DEBUG oslo_vmware.api [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 821.538140] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52406f00-5095-4d0b-63cc-9e8020722ca8" [ 821.538140] env[69328]: _type = "Task" [ 821.538140] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.569444] env[69328]: DEBUG oslo_vmware.api [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52406f00-5095-4d0b-63cc-9e8020722ca8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.598229] env[69328]: DEBUG nova.compute.manager [req-d8cee816-edfa-4970-a8ab-7d8c9f74778d req-1294a993-0181-493a-a38a-970dc3f081db service nova] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Received event network-changed-ad43b1f6-e3ce-4362-856f-82909e1eb51c {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 821.598605] env[69328]: DEBUG nova.compute.manager [req-d8cee816-edfa-4970-a8ab-7d8c9f74778d req-1294a993-0181-493a-a38a-970dc3f081db service nova] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Refreshing instance network info cache due to event network-changed-ad43b1f6-e3ce-4362-856f-82909e1eb51c. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 821.598946] env[69328]: DEBUG oslo_concurrency.lockutils [req-d8cee816-edfa-4970-a8ab-7d8c9f74778d req-1294a993-0181-493a-a38a-970dc3f081db service nova] Acquiring lock "refresh_cache-62fa6807-f67d-4bf5-ba23-9e97f9da120e" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.598946] env[69328]: DEBUG oslo_concurrency.lockutils [req-d8cee816-edfa-4970-a8ab-7d8c9f74778d req-1294a993-0181-493a-a38a-970dc3f081db service nova] Acquired lock "refresh_cache-62fa6807-f67d-4bf5-ba23-9e97f9da120e" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 821.599084] env[69328]: DEBUG nova.network.neutron [req-d8cee816-edfa-4970-a8ab-7d8c9f74778d req-1294a993-0181-493a-a38a-970dc3f081db service nova] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Refreshing network info cache for port ad43b1f6-e3ce-4362-856f-82909e1eb51c {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 821.788358] env[69328]: DEBUG nova.network.neutron [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Successfully created port: 217c955e-5f6e-4245-be2d-e3bb84c2917f {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 821.849944] env[69328]: DEBUG nova.compute.manager [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 821.852796] env[69328]: DEBUG nova.compute.manager [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 822.056699] env[69328]: DEBUG oslo_vmware.api [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52406f00-5095-4d0b-63cc-9e8020722ca8, 'name': SearchDatastore_Task, 'duration_secs': 0.011512} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.057171] env[69328]: DEBUG oslo_concurrency.lockutils [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 822.057303] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 822.057544] env[69328]: DEBUG oslo_concurrency.lockutils [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.057665] env[69328]: DEBUG oslo_concurrency.lockutils [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 822.057847] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 822.058250] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ff572475-b56c-40cc-948b-26f31353e087 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.068631] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 822.068819] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 822.069623] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ec5e99b-f7d2-49df-8885-904ffd4c5555 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.076796] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "55f44102-2891-4b6c-b31e-e8255a24d180" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 822.077178] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "55f44102-2891-4b6c-b31e-e8255a24d180" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 822.077490] env[69328]: DEBUG nova.compute.manager [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 822.077850] env[69328]: DEBUG oslo_vmware.api [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 822.077850] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52bf8035-4529-8e43-9b34-ffce84ce64f6" [ 822.077850] env[69328]: _type = "Task" [ 822.077850] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.078991] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6a43d45-aff0-45ef-92e1-eab428b5e62c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.106021] env[69328]: DEBUG oslo_vmware.api [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52bf8035-4529-8e43-9b34-ffce84ce64f6, 'name': SearchDatastore_Task, 'duration_secs': 0.010976} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.109216] env[69328]: DEBUG nova.compute.manager [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69328) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 822.112374] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-385124be-a86e-4e23-840e-ec4870023c02 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.122927] env[69328]: DEBUG oslo_vmware.api [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 822.122927] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]521bcf02-3283-9aca-cde7-f1aa616f36cb" [ 822.122927] env[69328]: _type = "Task" [ 822.122927] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.132948] env[69328]: DEBUG oslo_vmware.api [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]521bcf02-3283-9aca-cde7-f1aa616f36cb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.370628] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0da0cde-906f-46ac-8124-df6b20b3d400 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.381580] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb6f4d2b-3181-45f1-8dcb-020429cc91e1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.385854] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 822.417177] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2e298255-278a-43c8-b6fe-8da33338788a tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Acquiring lock "ef7effe4-b37f-4fab-ad24-9d8f72a47ee2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 822.417467] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2e298255-278a-43c8-b6fe-8da33338788a tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Lock "ef7effe4-b37f-4fab-ad24-9d8f72a47ee2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 822.417680] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2e298255-278a-43c8-b6fe-8da33338788a tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Acquiring lock "ef7effe4-b37f-4fab-ad24-9d8f72a47ee2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 822.417868] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2e298255-278a-43c8-b6fe-8da33338788a tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Lock "ef7effe4-b37f-4fab-ad24-9d8f72a47ee2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 822.418799] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2e298255-278a-43c8-b6fe-8da33338788a tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Lock "ef7effe4-b37f-4fab-ad24-9d8f72a47ee2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 822.420941] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-346a9e2d-e75c-45b2-ba07-cf2d698f3fc6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.423768] env[69328]: INFO nova.compute.manager [None req-2e298255-278a-43c8-b6fe-8da33338788a tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Terminating instance [ 822.433271] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3cf4d86-a92d-441f-b535-52e8e0e7a3c8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.436738] env[69328]: DEBUG nova.network.neutron [req-d8cee816-edfa-4970-a8ab-7d8c9f74778d req-1294a993-0181-493a-a38a-970dc3f081db service nova] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Updated VIF entry in instance network info cache for port ad43b1f6-e3ce-4362-856f-82909e1eb51c. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 822.438047] env[69328]: DEBUG nova.network.neutron [req-d8cee816-edfa-4970-a8ab-7d8c9f74778d req-1294a993-0181-493a-a38a-970dc3f081db service nova] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Updating instance_info_cache with network_info: [{"id": "ad43b1f6-e3ce-4362-856f-82909e1eb51c", "address": "fa:16:3e:73:37:45", "network": {"id": "5b6ce910-5dc1-49bb-ad4b-fe1a86509fc3", "bridge": "br-int", "label": "tempest-ImagesTestJSON-224847435-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "636412f89c9d488a9cfd6f19ef046efc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1e1e320-ec56-4fcc-b6e9-30aa210d3b36", "external-id": "nsx-vlan-transportzone-447", "segmentation_id": 447, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad43b1f6-e3", "ovs_interfaceid": "ad43b1f6-e3ce-4362-856f-82909e1eb51c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.451571] env[69328]: DEBUG nova.compute.provider_tree [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 822.597815] env[69328]: INFO nova.compute.manager [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Rebuilding instance [ 822.621562] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 822.623816] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f79f60b5-3879-4133-aeae-b39ddb8be1ae {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.636452] env[69328]: DEBUG oslo_vmware.api [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]521bcf02-3283-9aca-cde7-f1aa616f36cb, 'name': SearchDatastore_Task, 'duration_secs': 0.010133} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.642265] env[69328]: DEBUG oslo_concurrency.lockutils [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 822.642958] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 62fa6807-f67d-4bf5-ba23-9e97f9da120e/62fa6807-f67d-4bf5-ba23-9e97f9da120e.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 822.643157] env[69328]: DEBUG oslo_vmware.api [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 822.643157] env[69328]: value = "task-3273312" [ 822.643157] env[69328]: _type = "Task" [ 822.643157] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.643425] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7f971999-82a9-4042-bdbf-0252d12a8081 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.657725] env[69328]: DEBUG oslo_vmware.api [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273312, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.657725] env[69328]: DEBUG oslo_vmware.api [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 822.657725] env[69328]: value = "task-3273313" [ 822.657725] env[69328]: _type = "Task" [ 822.657725] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.667577] env[69328]: DEBUG oslo_vmware.api [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273313, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.677033] env[69328]: DEBUG nova.compute.manager [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 822.677955] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a12bb655-2f73-4c3f-8d73-e48e3518f9fb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.865916] env[69328]: DEBUG nova.compute.manager [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 822.894169] env[69328]: DEBUG nova.virt.hardware [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 822.894514] env[69328]: DEBUG nova.virt.hardware [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 822.894690] env[69328]: DEBUG nova.virt.hardware [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 822.894878] env[69328]: DEBUG nova.virt.hardware [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 822.895033] env[69328]: DEBUG nova.virt.hardware [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 822.895184] env[69328]: DEBUG nova.virt.hardware [None 
req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 822.895400] env[69328]: DEBUG nova.virt.hardware [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 822.896868] env[69328]: DEBUG nova.virt.hardware [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 822.896868] env[69328]: DEBUG nova.virt.hardware [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 822.896868] env[69328]: DEBUG nova.virt.hardware [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 822.896868] env[69328]: DEBUG nova.virt.hardware [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 822.897084] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3963cee-9d5a-4bb5-8d27-3de10e9e6deb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.906638] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6a450bc-4c39-461d-abb3-182b16d7d807 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.929029] env[69328]: DEBUG nova.compute.manager [None req-2e298255-278a-43c8-b6fe-8da33338788a tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 822.929271] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2e298255-278a-43c8-b6fe-8da33338788a tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 822.930256] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac7daf12-f0a7-4f76-aff9-5a4ff1433fcc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.941200] env[69328]: DEBUG oslo_concurrency.lockutils [req-d8cee816-edfa-4970-a8ab-7d8c9f74778d req-1294a993-0181-493a-a38a-970dc3f081db service nova] Releasing lock "refresh_cache-62fa6807-f67d-4bf5-ba23-9e97f9da120e" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 822.941789] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e298255-278a-43c8-b6fe-8da33338788a tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 822.942107] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-17d9da97-da5d-40f9-b2df-267cdef29374 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.952899] env[69328]: DEBUG oslo_vmware.api [None req-2e298255-278a-43c8-b6fe-8da33338788a tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Waiting for the task: (returnval){ [ 822.952899] env[69328]: value = "task-3273314" [ 822.952899] env[69328]: _type = "Task" [ 822.952899] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.957454] env[69328]: DEBUG nova.scheduler.client.report [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 822.971414] env[69328]: DEBUG oslo_vmware.api [None req-2e298255-278a-43c8-b6fe-8da33338788a tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Task: {'id': task-3273314, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.158643] env[69328]: DEBUG oslo_vmware.api [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273312, 'name': PowerOffVM_Task, 'duration_secs': 0.224053} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.163017] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 823.163280] env[69328]: DEBUG nova.compute.manager [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 823.166238] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-388ba936-2385-42a4-916c-369a06d7ef9b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.177546] env[69328]: DEBUG oslo_vmware.api [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273313, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.502683} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.178084] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 62fa6807-f67d-4bf5-ba23-9e97f9da120e/62fa6807-f67d-4bf5-ba23-9e97f9da120e.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 823.178373] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 823.178567] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0ffb875a-b47a-4e55-beb7-9fc350b68d20 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.186305] env[69328]: DEBUG oslo_vmware.api [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 823.186305] env[69328]: value = "task-3273315" [ 823.186305] env[69328]: _type = "Task" [ 823.186305] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.200011] env[69328]: DEBUG oslo_vmware.api [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273315, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.205287] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Acquiring lock "7232ad5c-9f4e-425e-824a-4c3750f665eb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 823.205637] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Lock "7232ad5c-9f4e-425e-824a-4c3750f665eb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 823.441065] env[69328]: DEBUG nova.network.neutron [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Successfully updated port: 217c955e-5f6e-4245-be2d-e3bb84c2917f {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 823.464802] env[69328]: DEBUG oslo_concurrency.lockutils [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.626s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 823.465588] env[69328]: DEBUG nova.compute.manager [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 823.468140] env[69328]: DEBUG oslo_vmware.api [None req-2e298255-278a-43c8-b6fe-8da33338788a tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Task: {'id': task-3273314, 'name': PowerOffVM_Task, 'duration_secs': 0.273348} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.468473] env[69328]: DEBUG oslo_concurrency.lockutils [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 32.779s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 823.470122] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e298255-278a-43c8-b6fe-8da33338788a tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 823.472204] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2e298255-278a-43c8-b6fe-8da33338788a tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 823.472204] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7bcd5cc8-8a06-474f-bf8b-431ba963ee15 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.545743] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2e298255-278a-43c8-b6fe-8da33338788a tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 823.545743] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2e298255-278a-43c8-b6fe-8da33338788a tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 823.545743] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e298255-278a-43c8-b6fe-8da33338788a tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Deleting the datastore file [datastore2] ef7effe4-b37f-4fab-ad24-9d8f72a47ee2 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 823.545743] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ca900926-5b8c-4e75-93b6-ac524af1259b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.552030] env[69328]: DEBUG oslo_vmware.api [None req-2e298255-278a-43c8-b6fe-8da33338788a tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Waiting for the task: (returnval){ [ 823.552030] env[69328]: value = "task-3273317" [ 823.552030] env[69328]: _type = "Task" [ 823.552030] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.561630] env[69328]: DEBUG oslo_vmware.api [None req-2e298255-278a-43c8-b6fe-8da33338788a tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Task: {'id': task-3273317, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.630291] env[69328]: DEBUG nova.compute.manager [req-aab5c2ef-528a-4f36-ab20-ccaea60ab2ea req-bce793ef-d6eb-4abf-bb7f-2f2486a73427 service nova] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Received event network-vif-plugged-217c955e-5f6e-4245-be2d-e3bb84c2917f {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 823.630291] env[69328]: DEBUG oslo_concurrency.lockutils [req-aab5c2ef-528a-4f36-ab20-ccaea60ab2ea req-bce793ef-d6eb-4abf-bb7f-2f2486a73427 service nova] Acquiring lock "3daf7b73-5679-47ce-b847-f3786f1000d4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 823.630291] env[69328]: DEBUG oslo_concurrency.lockutils [req-aab5c2ef-528a-4f36-ab20-ccaea60ab2ea req-bce793ef-d6eb-4abf-bb7f-2f2486a73427 service nova] Lock "3daf7b73-5679-47ce-b847-f3786f1000d4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 823.630291] env[69328]: DEBUG oslo_concurrency.lockutils [req-aab5c2ef-528a-4f36-ab20-ccaea60ab2ea req-bce793ef-d6eb-4abf-bb7f-2f2486a73427 service nova] Lock "3daf7b73-5679-47ce-b847-f3786f1000d4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 823.630453] env[69328]: DEBUG nova.compute.manager [req-aab5c2ef-528a-4f36-ab20-ccaea60ab2ea req-bce793ef-d6eb-4abf-bb7f-2f2486a73427 service nova] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] No waiting events found dispatching network-vif-plugged-217c955e-5f6e-4245-be2d-e3bb84c2917f {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 823.630771] env[69328]: WARNING nova.compute.manager [req-aab5c2ef-528a-4f36-ab20-ccaea60ab2ea req-bce793ef-d6eb-4abf-bb7f-2f2486a73427 service nova] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Received unexpected event network-vif-plugged-217c955e-5f6e-4245-be2d-e3bb84c2917f for instance with vm_state building and task_state spawning. [ 823.631072] env[69328]: DEBUG nova.compute.manager [req-aab5c2ef-528a-4f36-ab20-ccaea60ab2ea req-bce793ef-d6eb-4abf-bb7f-2f2486a73427 service nova] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Received event network-changed-217c955e-5f6e-4245-be2d-e3bb84c2917f {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 823.631422] env[69328]: DEBUG nova.compute.manager [req-aab5c2ef-528a-4f36-ab20-ccaea60ab2ea req-bce793ef-d6eb-4abf-bb7f-2f2486a73427 service nova] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Refreshing instance network info cache due to event network-changed-217c955e-5f6e-4245-be2d-e3bb84c2917f. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 823.631731] env[69328]: DEBUG oslo_concurrency.lockutils [req-aab5c2ef-528a-4f36-ab20-ccaea60ab2ea req-bce793ef-d6eb-4abf-bb7f-2f2486a73427 service nova] Acquiring lock "refresh_cache-3daf7b73-5679-47ce-b847-f3786f1000d4" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.631969] env[69328]: DEBUG oslo_concurrency.lockutils [req-aab5c2ef-528a-4f36-ab20-ccaea60ab2ea req-bce793ef-d6eb-4abf-bb7f-2f2486a73427 service nova] Acquired lock "refresh_cache-3daf7b73-5679-47ce-b847-f3786f1000d4" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 823.632242] env[69328]: DEBUG nova.network.neutron [req-aab5c2ef-528a-4f36-ab20-ccaea60ab2ea req-bce793ef-d6eb-4abf-bb7f-2f2486a73427 service nova] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Refreshing network info cache for port 217c955e-5f6e-4245-be2d-e3bb84c2917f {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 823.683853] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "55f44102-2891-4b6c-b31e-e8255a24d180" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.606s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 823.699515] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 823.700032] env[69328]: DEBUG oslo_vmware.api [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273315, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072009} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.700795] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-30c1d382-97f6-48ca-a371-59aef27e100d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.705188] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 823.705674] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38791903-42af-4ba7-920c-b04d4fe6b26a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.733232] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Reconfiguring VM instance instance-00000037 to attach disk [datastore2] 62fa6807-f67d-4bf5-ba23-9e97f9da120e/62fa6807-f67d-4bf5-ba23-9e97f9da120e.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 823.735164] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0427ba9c-87f7-42cc-abf1-2705e12a3c84 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.751270] env[69328]: DEBUG oslo_vmware.api [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Waiting for the task: (returnval){ [ 823.751270] env[69328]: value = "task-3273318" [ 823.751270] env[69328]: _type = "Task" [ 823.751270] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.762846] env[69328]: DEBUG oslo_vmware.api [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 823.762846] env[69328]: value = "task-3273319" [ 823.762846] env[69328]: _type = "Task" [ 823.762846] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.763151] env[69328]: DEBUG oslo_vmware.api [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Task: {'id': task-3273318, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.774132] env[69328]: DEBUG oslo_vmware.api [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273319, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.944450] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Acquiring lock "refresh_cache-3daf7b73-5679-47ce-b847-f3786f1000d4" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.972973] env[69328]: DEBUG nova.compute.utils [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 823.976732] env[69328]: INFO nova.compute.claims [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 823.980533] env[69328]: DEBUG nova.compute.manager [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 823.980706] env[69328]: DEBUG nova.network.neutron [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 824.040154] env[69328]: DEBUG nova.policy [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7a1e946caadd413985d7965125000b07', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f71a072b33154efe9636b50e25f93381', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 824.063311] env[69328]: DEBUG oslo_vmware.api [None req-2e298255-278a-43c8-b6fe-8da33338788a tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Task: {'id': task-3273317, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156469} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.063624] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e298255-278a-43c8-b6fe-8da33338788a tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 824.063830] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2e298255-278a-43c8-b6fe-8da33338788a tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 824.064049] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2e298255-278a-43c8-b6fe-8da33338788a tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 824.064233] env[69328]: INFO nova.compute.manager [None req-2e298255-278a-43c8-b6fe-8da33338788a tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Took 1.14 seconds to destroy the instance on the hypervisor. [ 824.064482] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2e298255-278a-43c8-b6fe-8da33338788a tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 824.064676] env[69328]: DEBUG nova.compute.manager [-] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 824.064769] env[69328]: DEBUG nova.network.neutron [-] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 824.194849] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 824.197884] env[69328]: DEBUG nova.network.neutron [req-aab5c2ef-528a-4f36-ab20-ccaea60ab2ea req-bce793ef-d6eb-4abf-bb7f-2f2486a73427 service nova] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 824.265278] env[69328]: DEBUG oslo_vmware.api [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Task: {'id': task-3273318, 'name': PowerOffVM_Task, 'duration_secs': 0.137983} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.267837] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 824.268093] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 824.269420] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7da77f2b-410e-474f-8348-16d127f1e9de {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.281252] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 824.281563] env[69328]: DEBUG oslo_vmware.api [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273319, 'name': ReconfigVM_Task, 'duration_secs': 0.346988} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.281765] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-385988b7-eebe-4059-8917-9facaf20a776 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.283437] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Reconfigured VM instance instance-00000037 to attach disk [datastore2] 62fa6807-f67d-4bf5-ba23-9e97f9da120e/62fa6807-f67d-4bf5-ba23-9e97f9da120e.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 824.284038] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-33d88032-fa9c-4a37-8183-bc8bb6761f26 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.294247] env[69328]: DEBUG oslo_vmware.api [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 824.294247] env[69328]: value = "task-3273320" [ 824.294247] env[69328]: _type = "Task" [ 824.294247] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.302190] env[69328]: DEBUG oslo_vmware.api [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273320, 'name': Rename_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.320137] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 824.320389] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 824.320591] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Deleting the datastore file [datastore1] b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 824.320873] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-31ff40f5-11b9-46ef-a53d-984fcf3281a2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.328973] env[69328]: DEBUG oslo_vmware.api [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Waiting for the task: (returnval){ [ 824.328973] env[69328]: value = "task-3273322" [ 824.328973] env[69328]: _type = "Task" [ 824.328973] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.339589] env[69328]: DEBUG oslo_vmware.api [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Task: {'id': task-3273322, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.369565] env[69328]: DEBUG nova.network.neutron [req-aab5c2ef-528a-4f36-ab20-ccaea60ab2ea req-bce793ef-d6eb-4abf-bb7f-2f2486a73427 service nova] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 824.430602] env[69328]: DEBUG nova.network.neutron [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Successfully created port: cf26672a-3aeb-4534-8776-36a45511c5b4 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 824.481816] env[69328]: DEBUG nova.compute.manager [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Start building block device mappings for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 824.486073] env[69328]: INFO nova.compute.resource_tracker [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Updating resource usage from migration b5ff88a7-0c06-459d-8382-fae134bf7dff [ 824.736599] env[69328]: DEBUG nova.compute.manager [req-9ea7d983-b6b1-40c4-8a61-b1e4e4b7ce76 req-0bb13797-7426-4b0d-9cb2-99d2a1ec3701 service nova] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Received event network-vif-deleted-54919328-7a97-481c-bd6d-056207108b76 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 824.736762] env[69328]: INFO nova.compute.manager [req-9ea7d983-b6b1-40c4-8a61-b1e4e4b7ce76 req-0bb13797-7426-4b0d-9cb2-99d2a1ec3701 service nova] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Neutron deleted interface 54919328-7a97-481c-bd6d-056207108b76; detaching it from the instance and deleting it from the info cache [ 824.736905] env[69328]: DEBUG nova.network.neutron [req-9ea7d983-b6b1-40c4-8a61-b1e4e4b7ce76 req-0bb13797-7426-4b0d-9cb2-99d2a1ec3701 service nova] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 824.805150] env[69328]: DEBUG oslo_vmware.api [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273320, 'name': Rename_Task, 'duration_secs': 0.182597} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.805150] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 824.805150] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f0c0178c-166e-4599-80e0-bf77c9b29feb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.809118] env[69328]: DEBUG nova.network.neutron [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Successfully created port: f3f139e6-24e0-47d6-8700-cdcaec9d0b1b {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 824.812631] env[69328]: DEBUG oslo_vmware.api [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 824.812631] env[69328]: value = "task-3273323" [ 824.812631] env[69328]: _type = "Task" [ 824.812631] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.823011] env[69328]: DEBUG oslo_vmware.api [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273323, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.839775] env[69328]: DEBUG oslo_vmware.api [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Task: {'id': task-3273322, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.122807} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.840021] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 824.840214] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 824.840392] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 824.872361] env[69328]: DEBUG oslo_concurrency.lockutils [req-aab5c2ef-528a-4f36-ab20-ccaea60ab2ea req-bce793ef-d6eb-4abf-bb7f-2f2486a73427 service nova] Releasing lock "refresh_cache-3daf7b73-5679-47ce-b847-f3786f1000d4" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 824.872755] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Acquired lock "refresh_cache-3daf7b73-5679-47ce-b847-f3786f1000d4" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 824.872914] env[69328]: DEBUG nova.network.neutron [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 824.956671] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51c0a9b1-81a1-4f5a-9ef8-b7dfd016e4d6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.970564] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd323ac0-0d2c-43ed-b9b1-81069a24e89e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.003683] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b57174cd-2d79-439f-8684-07cfdbfd919b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "55f44102-2891-4b6c-b31e-e8255a24d180" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 825.003975] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b57174cd-2d79-439f-8684-07cfdbfd919b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "55f44102-2891-4b6c-b31e-e8255a24d180" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 825.004190] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b57174cd-2d79-439f-8684-07cfdbfd919b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "55f44102-2891-4b6c-b31e-e8255a24d180-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 825.004377] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b57174cd-2d79-439f-8684-07cfdbfd919b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "55f44102-2891-4b6c-b31e-e8255a24d180-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 825.004546] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b57174cd-2d79-439f-8684-07cfdbfd919b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "55f44102-2891-4b6c-b31e-e8255a24d180-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 825.006300] env[69328]: INFO nova.compute.manager [None req-b57174cd-2d79-439f-8684-07cfdbfd919b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Terminating instance [ 825.011170] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59ea33d7-08e2-40ca-b4bd-cc678bed9a57 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.017907] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-924a7a94-51ea-4520-a656-18f2e418b719 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.035185] env[69328]: DEBUG nova.network.neutron [-] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.036396] env[69328]: DEBUG nova.compute.provider_tree [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 825.100928] env[69328]: DEBUG nova.network.neutron [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 
tempest-ServersTestMultiNic-123064241-project-member] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Successfully created port: 046427f3-7078-436b-be68-5df86aa70395 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 825.241130] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c2947b7f-707b-472e-98f4-60913c61b123 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.252730] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19e4113e-3ea0-4a03-b476-3875ac66f828 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.293852] env[69328]: DEBUG nova.compute.manager [req-9ea7d983-b6b1-40c4-8a61-b1e4e4b7ce76 req-0bb13797-7426-4b0d-9cb2-99d2a1ec3701 service nova] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Detach interface failed, port_id=54919328-7a97-481c-bd6d-056207108b76, reason: Instance ef7effe4-b37f-4fab-ad24-9d8f72a47ee2 could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 825.326413] env[69328]: DEBUG oslo_vmware.api [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273323, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.407122] env[69328]: DEBUG nova.network.neutron [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 825.509760] env[69328]: DEBUG nova.compute.manager [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 825.512507] env[69328]: DEBUG nova.compute.manager [None req-b57174cd-2d79-439f-8684-07cfdbfd919b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 825.513023] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b57174cd-2d79-439f-8684-07cfdbfd919b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 825.515979] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82970cd8-5da4-46a7-8281-376f783393c5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.524702] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b57174cd-2d79-439f-8684-07cfdbfd919b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 825.525016] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-14f2c678-8e40-422d-87dd-de2992580f5f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.536201] env[69328]: DEBUG nova.virt.hardware [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 825.536452] env[69328]: DEBUG nova.virt.hardware [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 825.536609] env[69328]: DEBUG nova.virt.hardware [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 825.536791] env[69328]: DEBUG nova.virt.hardware [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 825.537017] env[69328]: DEBUG nova.virt.hardware [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 825.537124] env[69328]: DEBUG nova.virt.hardware [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 825.537335] env[69328]: DEBUG nova.virt.hardware [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 825.537494] env[69328]: DEBUG nova.virt.hardware [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 825.537660] env[69328]: DEBUG nova.virt.hardware [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 825.537821] env[69328]: DEBUG nova.virt.hardware [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 825.538009] env[69328]: DEBUG nova.virt.hardware [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 825.538516] env[69328]: INFO nova.compute.manager [-] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Took 1.47 seconds to deallocate network for instance. 
[ 825.539294] env[69328]: DEBUG nova.scheduler.client.report [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 825.543768] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b3824ea-09c9-49e6-a346-b4e8783f4aaa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.559101] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-821bcf62-247c-4c69-a39f-05f66eb50aa4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.602133] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b57174cd-2d79-439f-8684-07cfdbfd919b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 825.602378] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b57174cd-2d79-439f-8684-07cfdbfd919b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 825.602559] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-b57174cd-2d79-439f-8684-07cfdbfd919b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Deleting the datastore file [datastore1] 55f44102-2891-4b6c-b31e-e8255a24d180 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 825.603190] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-06dce9c8-28da-4efe-ae8d-18d2160086be {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.610455] env[69328]: DEBUG oslo_vmware.api [None req-b57174cd-2d79-439f-8684-07cfdbfd919b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 825.610455] env[69328]: value = "task-3273325" [ 825.610455] env[69328]: _type = "Task" [ 825.610455] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.618790] env[69328]: DEBUG oslo_vmware.api [None req-b57174cd-2d79-439f-8684-07cfdbfd919b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273325, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.620395] env[69328]: DEBUG nova.network.neutron [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Updating instance_info_cache with network_info: [{"id": "217c955e-5f6e-4245-be2d-e3bb84c2917f", "address": "fa:16:3e:72:5c:2f", "network": {"id": "edfe6995-ac16-4086-9cb4-efcda93045c6", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-703565163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e8bc0d144f44546bd21fb04277c998c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f", "external-id": "nsx-vlan-transportzone-584", "segmentation_id": 584, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap217c955e-5f", "ovs_interfaceid": "217c955e-5f6e-4245-be2d-e3bb84c2917f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.824181] env[69328]: DEBUG oslo_vmware.api [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273323, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.871599] env[69328]: DEBUG nova.virt.hardware [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 825.871876] env[69328]: DEBUG nova.virt.hardware [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 825.871992] env[69328]: DEBUG nova.virt.hardware [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 825.872190] env[69328]: DEBUG nova.virt.hardware [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 825.872355] env[69328]: DEBUG nova.virt.hardware [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 825.872503] env[69328]: DEBUG nova.virt.hardware [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 825.872705] env[69328]: DEBUG nova.virt.hardware [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 825.872859] env[69328]: DEBUG nova.virt.hardware [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 825.873084] env[69328]: DEBUG nova.virt.hardware [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 
tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 825.873187] env[69328]: DEBUG nova.virt.hardware [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 825.873353] env[69328]: DEBUG nova.virt.hardware [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 825.874262] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b29d2cfb-c41e-492c-ad2d-96c361a6a5db {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.882790] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c59c188d-3291-4353-8928-34184da64bcc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.896923] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Instance VIF info [] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 825.902471] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 825.902725] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 825.902955] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5e6fe519-ee41-4ff0-b79a-47010b5f83ae {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.920542] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 825.920542] env[69328]: value = "task-3273326" [ 825.920542] env[69328]: _type = "Task" [ 825.920542] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.929951] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273326, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.044284] env[69328]: DEBUG oslo_concurrency.lockutils [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.576s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 826.044535] env[69328]: INFO nova.compute.manager [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Migrating [ 826.051657] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a15a5191-3036-4059-83a7-dcdee25dcac7 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.034s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 826.052027] env[69328]: DEBUG nova.objects.instance [None req-a15a5191-3036-4059-83a7-dcdee25dcac7 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Lazy-loading 'resources' on Instance uuid e92953f4-b634-4ef9-a5ad-63a886cfa007 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 826.053978] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2e298255-278a-43c8-b6fe-8da33338788a tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 826.123074] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Releasing lock "refresh_cache-3daf7b73-5679-47ce-b847-f3786f1000d4" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 826.123390] env[69328]: DEBUG nova.compute.manager [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Instance network_info: |[{"id": "217c955e-5f6e-4245-be2d-e3bb84c2917f", "address": "fa:16:3e:72:5c:2f", "network": {"id": "edfe6995-ac16-4086-9cb4-efcda93045c6", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-703565163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e8bc0d144f44546bd21fb04277c998c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f", "external-id": 
"nsx-vlan-transportzone-584", "segmentation_id": 584, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap217c955e-5f", "ovs_interfaceid": "217c955e-5f6e-4245-be2d-e3bb84c2917f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 826.123683] env[69328]: DEBUG oslo_vmware.api [None req-b57174cd-2d79-439f-8684-07cfdbfd919b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273325, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.230281} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.124064] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:72:5c:2f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '217c955e-5f6e-4245-be2d-e3bb84c2917f', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 826.131723] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 826.131940] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-b57174cd-2d79-439f-8684-07cfdbfd919b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 826.132127] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b57174cd-2d79-439f-8684-07cfdbfd919b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 826.132312] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b57174cd-2d79-439f-8684-07cfdbfd919b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 826.132500] env[69328]: INFO nova.compute.manager [None req-b57174cd-2d79-439f-8684-07cfdbfd919b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Took 0.62 seconds to destroy the instance on the hypervisor. 
[ 826.132736] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b57174cd-2d79-439f-8684-07cfdbfd919b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 826.133291] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 826.133489] env[69328]: DEBUG nova.compute.manager [-] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 826.133606] env[69328]: DEBUG nova.network.neutron [-] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 826.135279] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8730c248-6dea-4d0b-99fd-d701c4ee5d94 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.156842] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 826.156842] env[69328]: value = "task-3273327" [ 826.156842] env[69328]: _type = "Task" [ 826.156842] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.165299] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273327, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.325028] env[69328]: DEBUG oslo_vmware.api [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273323, 'name': PowerOnVM_Task, 'duration_secs': 1.185701} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.325338] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 826.325603] env[69328]: INFO nova.compute.manager [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Took 8.16 seconds to spawn the instance on the hypervisor. 
[ 826.325922] env[69328]: DEBUG nova.compute.manager [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 826.327297] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70b6f379-94d3-4035-83e0-bfbc324913d0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.435220] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273326, 'name': CreateVM_Task, 'duration_secs': 0.474541} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.435220] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 826.435220] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.435220] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 826.435220] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 826.435220] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70879871-0945-4527-8669-be6a7be389ea {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.439645] env[69328]: DEBUG oslo_vmware.api [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Waiting for the task: (returnval){ [ 826.439645] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c254ac-c3cc-6d2f-ad3b-68cf22a9d764" [ 826.439645] env[69328]: _type = "Task" [ 826.439645] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.447956] env[69328]: DEBUG oslo_vmware.api [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c254ac-c3cc-6d2f-ad3b-68cf22a9d764, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.548974] env[69328]: DEBUG nova.compute.manager [req-89fbc61a-73e3-455b-96dc-b3ba7c820989 req-cd255130-f884-43ad-baff-c30bd3165cf3 service nova] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Received event network-vif-plugged-cf26672a-3aeb-4534-8776-36a45511c5b4 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 826.549115] env[69328]: DEBUG oslo_concurrency.lockutils [req-89fbc61a-73e3-455b-96dc-b3ba7c820989 req-cd255130-f884-43ad-baff-c30bd3165cf3 service nova] Acquiring lock "d045c9ca-71f9-411e-9048-71b36c32f4b2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 826.549369] env[69328]: DEBUG oslo_concurrency.lockutils [req-89fbc61a-73e3-455b-96dc-b3ba7c820989 req-cd255130-f884-43ad-baff-c30bd3165cf3 service nova] Lock "d045c9ca-71f9-411e-9048-71b36c32f4b2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 826.549558] env[69328]: DEBUG oslo_concurrency.lockutils [req-89fbc61a-73e3-455b-96dc-b3ba7c820989 req-cd255130-f884-43ad-baff-c30bd3165cf3 service nova] Lock "d045c9ca-71f9-411e-9048-71b36c32f4b2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 826.549732] env[69328]: DEBUG nova.compute.manager [req-89fbc61a-73e3-455b-96dc-b3ba7c820989 req-cd255130-f884-43ad-baff-c30bd3165cf3 service nova] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] No waiting events found dispatching network-vif-plugged-cf26672a-3aeb-4534-8776-36a45511c5b4 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 826.549894] env[69328]: WARNING nova.compute.manager [req-89fbc61a-73e3-455b-96dc-b3ba7c820989 req-cd255130-f884-43ad-baff-c30bd3165cf3 service nova] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Received unexpected event network-vif-plugged-cf26672a-3aeb-4534-8776-36a45511c5b4 for instance with vm_state building and task_state spawning. 
[ 826.564502] env[69328]: DEBUG oslo_concurrency.lockutils [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquiring lock "refresh_cache-25fb207b-9388-4198-bb48-ab7cebd43375" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.564694] env[69328]: DEBUG oslo_concurrency.lockutils [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquired lock "refresh_cache-25fb207b-9388-4198-bb48-ab7cebd43375" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 826.564864] env[69328]: DEBUG nova.network.neutron [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 826.667417] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273327, 'name': CreateVM_Task, 'duration_secs': 0.472677} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.669819] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 826.670667] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.670835] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 826.671163] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 826.671476] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c994627-8221-48e8-9d35-01596cf8935c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.676391] env[69328]: DEBUG oslo_vmware.api [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for the task: (returnval){ [ 826.676391] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d8022b-e429-e89d-232d-c51a88f11982" [ 826.676391] env[69328]: _type = "Task" [ 826.676391] 
env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.685953] env[69328]: DEBUG oslo_vmware.api [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d8022b-e429-e89d-232d-c51a88f11982, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.703131] env[69328]: DEBUG nova.network.neutron [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Successfully updated port: cf26672a-3aeb-4534-8776-36a45511c5b4 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 826.776110] env[69328]: DEBUG nova.compute.manager [req-c417a369-fabd-4316-9baa-47e07382b133 req-f6bba685-06a6-4e4a-860b-096bda7134bd service nova] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Received event network-vif-deleted-87b2e37a-d778-4bd1-a107-6132378b5f4c {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 826.776110] env[69328]: INFO nova.compute.manager [req-c417a369-fabd-4316-9baa-47e07382b133 req-f6bba685-06a6-4e4a-860b-096bda7134bd service nova] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Neutron deleted interface 87b2e37a-d778-4bd1-a107-6132378b5f4c; detaching it from the instance and deleting it from the info cache [ 826.776110] env[69328]: DEBUG nova.network.neutron [req-c417a369-fabd-4316-9baa-47e07382b133 req-f6bba685-06a6-4e4a-860b-096bda7134bd service nova] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 826.846727] env[69328]: INFO nova.compute.manager [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Took 40.97 seconds to build instance. [ 826.900337] env[69328]: DEBUG nova.network.neutron [-] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 826.950769] env[69328]: DEBUG oslo_vmware.api [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c254ac-c3cc-6d2f-ad3b-68cf22a9d764, 'name': SearchDatastore_Task, 'duration_secs': 0.015107} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.953363] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 826.953627] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 826.953863] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.954016] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 826.954206] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 826.954642] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cbc9096c-ae99-4eee-ba2b-0a21fb2e38e5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.968065] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 826.968065] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 826.968482] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2fd0bdb0-e997-4afe-922b-63ee008855fd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.975222] env[69328]: DEBUG oslo_vmware.api [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Waiting for the task: (returnval){ [ 826.975222] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5221d046-4644-65d6-659b-684aa9cfa043" [ 826.975222] env[69328]: _type = "Task" [ 826.975222] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.986770] env[69328]: DEBUG oslo_vmware.api [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5221d046-4644-65d6-659b-684aa9cfa043, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.990879] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d209a0c-0ca8-4a19-a6b3-dc016b01868d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.998031] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbfe80a8-c1a8-4926-b729-86d0c285e409 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.031738] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02bf3bf7-ce51-4126-b31c-a001fade42c1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.039792] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-161ce856-86ed-49d9-99f8-e0d33f58f423 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.053773] env[69328]: DEBUG nova.compute.provider_tree [None req-a15a5191-3036-4059-83a7-dcdee25dcac7 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 827.190200] env[69328]: DEBUG oslo_vmware.api [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d8022b-e429-e89d-232d-c51a88f11982, 'name': SearchDatastore_Task, 'duration_secs': 0.035586} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.190546] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 827.190803] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 827.191085] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.191253] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 827.191438] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 827.191717] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5aa7dcc1-2da4-44a0-a385-875306927c57 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.202509] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 827.202707] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 827.206995] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7032fa5-29d5-4f06-a279-51a55fca6434 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.211253] env[69328]: DEBUG oslo_vmware.api [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for the task: (returnval){ [ 827.211253] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5247b602-1616-151b-5be8-4405baa9df07" [ 827.211253] env[69328]: _type = "Task" [ 827.211253] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.221282] env[69328]: DEBUG oslo_vmware.api [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5247b602-1616-151b-5be8-4405baa9df07, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.278740] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7e1a9da9-7b4c-4a04-ac63-8d83b90004f6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.288685] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daf74bae-850b-4bb8-a0e2-854c235bfd14 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.299699] env[69328]: DEBUG nova.network.neutron [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Updating instance_info_cache with network_info: [{"id": "32db9785-1822-4acf-9971-06db92f35c18", "address": "fa:16:3e:3e:aa:b8", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.119", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32db9785-18", "ovs_interfaceid": "32db9785-1822-4acf-9971-06db92f35c18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.325939] env[69328]: DEBUG oslo_concurrency.lockutils [None req-13da97df-4b58-47dc-82b5-0277fc90698d 
tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "62fa6807-f67d-4bf5-ba23-9e97f9da120e" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 827.326320] env[69328]: DEBUG nova.compute.manager [req-c417a369-fabd-4316-9baa-47e07382b133 req-f6bba685-06a6-4e4a-860b-096bda7134bd service nova] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Detach interface failed, port_id=87b2e37a-d778-4bd1-a107-6132378b5f4c, reason: Instance 55f44102-2891-4b6c-b31e-e8255a24d180 could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 827.349048] env[69328]: DEBUG oslo_concurrency.lockutils [None req-eed0aa7a-d1e5-40a9-bfd2-aa3d108cb6c1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "62fa6807-f67d-4bf5-ba23-9e97f9da120e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.751s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 827.350268] env[69328]: DEBUG oslo_concurrency.lockutils [None req-13da97df-4b58-47dc-82b5-0277fc90698d tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "62fa6807-f67d-4bf5-ba23-9e97f9da120e" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.024s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 827.350457] env[69328]: DEBUG nova.compute.manager [None req-13da97df-4b58-47dc-82b5-0277fc90698d tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 827.351683] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8a8170e-4287-4b04-94a4-51b0f06b368d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.359436] env[69328]: DEBUG nova.compute.manager [None req-13da97df-4b58-47dc-82b5-0277fc90698d tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69328) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 827.361776] env[69328]: DEBUG nova.objects.instance [None req-13da97df-4b58-47dc-82b5-0277fc90698d tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lazy-loading 'flavor' on Instance uuid 62fa6807-f67d-4bf5-ba23-9e97f9da120e {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 827.404995] env[69328]: INFO nova.compute.manager [-] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Took 1.27 seconds to deallocate network for instance. 
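Nearly every vCenter interaction in this log follows the same shape: invoke an API method that returns a Task managed object (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOn/PowerOffVM_Task), then poll it until it reports success, which is what the "Waiting for the task", "progress is N%" and "completed successfully ... duration_secs" lines from oslo_vmware.api record. A generic sketch of that poll loop follows; get_task_state and get_task_progress are assumed placeholder callables standing in for the vSphere property reads, not the oslo.vmware API itself.

# Generic poll-until-done loop illustrating what the _poll_task / wait_for_task
# lines in this log are doing. get_task_state/get_task_progress are hypothetical
# helpers; only the invoked method and duration_secs differ from task to task.
import time


class TaskFailed(Exception):
    pass


def wait_for_task(task_ref, get_task_state, get_task_progress,
                  poll_interval=0.5, timeout=300):
    """Poll a vCenter-style task reference until it succeeds, fails, or times out."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state = get_task_state(task_ref)          # e.g. 'running', 'success', 'error'
        if state == 'success':
            return task_ref
        if state == 'error':
            raise TaskFailed(f"task {task_ref} failed")
        print(f"task {task_ref} progress is {get_task_progress(task_ref)}%")
        time.sleep(poll_interval)
    raise TimeoutError(f"task {task_ref} did not complete within {timeout}s")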
[ 827.486311] env[69328]: DEBUG oslo_vmware.api [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5221d046-4644-65d6-659b-684aa9cfa043, 'name': SearchDatastore_Task, 'duration_secs': 0.031416} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.487109] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8f2e1c0-bdb6-467b-b6ac-93e80d4d9b6b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.492712] env[69328]: DEBUG oslo_vmware.api [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Waiting for the task: (returnval){ [ 827.492712] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52fc0f79-9580-7673-6a46-4600e4b223d6" [ 827.492712] env[69328]: _type = "Task" [ 827.492712] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.500847] env[69328]: DEBUG oslo_vmware.api [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52fc0f79-9580-7673-6a46-4600e4b223d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.557410] env[69328]: DEBUG nova.scheduler.client.report [None req-a15a5191-3036-4059-83a7-dcdee25dcac7 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 827.722385] env[69328]: DEBUG oslo_vmware.api [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5247b602-1616-151b-5be8-4405baa9df07, 'name': SearchDatastore_Task, 'duration_secs': 0.024315} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.722879] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb600343-4501-49a3-b98c-a9ad7c2e89ac {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.728146] env[69328]: DEBUG oslo_vmware.api [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for the task: (returnval){ [ 827.728146] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b00f23-696a-dda2-2ef4-3d11e4111116" [ 827.728146] env[69328]: _type = "Task" [ 827.728146] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.735771] env[69328]: DEBUG oslo_vmware.api [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b00f23-696a-dda2-2ef4-3d11e4111116, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.802956] env[69328]: DEBUG oslo_concurrency.lockutils [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Releasing lock "refresh_cache-25fb207b-9388-4198-bb48-ab7cebd43375" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 827.851856] env[69328]: DEBUG nova.compute.manager [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 827.911690] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b57174cd-2d79-439f-8684-07cfdbfd919b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 828.003959] env[69328]: DEBUG oslo_vmware.api [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52fc0f79-9580-7673-6a46-4600e4b223d6, 'name': SearchDatastore_Task, 'duration_secs': 0.010444} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.004245] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 828.004727] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b/b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 828.004980] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-57f2d25a-3baa-42a6-be93-607d84b73951 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.012787] env[69328]: DEBUG oslo_vmware.api [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Waiting for the task: (returnval){ [ 828.012787] env[69328]: value = "task-3273328" [ 828.012787] env[69328]: _type = "Task" [ 828.012787] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.021706] env[69328]: DEBUG oslo_vmware.api [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Task: {'id': task-3273328, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.063237] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a15a5191-3036-4059-83a7-dcdee25dcac7 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.011s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 828.065936] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.827s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 828.067620] env[69328]: INFO nova.compute.claims [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 828.090972] env[69328]: INFO nova.scheduler.client.report [None req-a15a5191-3036-4059-83a7-dcdee25dcac7 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Deleted allocations for instance e92953f4-b634-4ef9-a5ad-63a886cfa007 [ 828.240820] env[69328]: DEBUG oslo_vmware.api [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b00f23-696a-dda2-2ef4-3d11e4111116, 'name': SearchDatastore_Task, 'duration_secs': 0.011615} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.241114] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 828.241418] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 3daf7b73-5679-47ce-b847-f3786f1000d4/3daf7b73-5679-47ce-b847-f3786f1000d4.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 828.241694] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a82b93e5-931a-43a6-9c8f-f9bd09fc1c47 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.252740] env[69328]: DEBUG oslo_vmware.api [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for the task: (returnval){ [ 828.252740] env[69328]: value = "task-3273329" [ 828.252740] env[69328]: _type = "Task" [ 828.252740] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.265284] env[69328]: DEBUG oslo_vmware.api [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273329, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.367231] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-13da97df-4b58-47dc-82b5-0277fc90698d tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 828.367597] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f769156f-4b03-4e2e-b9bb-c5314b49380d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.374904] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 828.378018] env[69328]: DEBUG oslo_vmware.api [None req-13da97df-4b58-47dc-82b5-0277fc90698d tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 828.378018] env[69328]: value = "task-3273330" [ 828.378018] env[69328]: _type = "Task" [ 828.378018] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.390555] env[69328]: DEBUG oslo_vmware.api [None req-13da97df-4b58-47dc-82b5-0277fc90698d tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273330, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.523172] env[69328]: DEBUG oslo_vmware.api [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Task: {'id': task-3273328, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.480109} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.523479] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b/b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 828.523782] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 828.523945] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fbde806e-e4ea-4e9e-b24c-bf972c5c7de3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.531111] env[69328]: DEBUG oslo_vmware.api [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Waiting for the task: (returnval){ [ 828.531111] env[69328]: value = "task-3273331" [ 828.531111] env[69328]: _type = "Task" [ 828.531111] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.542649] env[69328]: DEBUG oslo_vmware.api [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Task: {'id': task-3273331, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.591453] env[69328]: DEBUG nova.compute.manager [req-421687f0-2682-42b0-9f23-133a24bd72c7 req-508fef02-4bbd-48d7-bed6-d003a3d292ea service nova] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Received event network-changed-cf26672a-3aeb-4534-8776-36a45511c5b4 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 828.591735] env[69328]: DEBUG nova.compute.manager [req-421687f0-2682-42b0-9f23-133a24bd72c7 req-508fef02-4bbd-48d7-bed6-d003a3d292ea service nova] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Refreshing instance network info cache due to event network-changed-cf26672a-3aeb-4534-8776-36a45511c5b4. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 828.591974] env[69328]: DEBUG oslo_concurrency.lockutils [req-421687f0-2682-42b0-9f23-133a24bd72c7 req-508fef02-4bbd-48d7-bed6-d003a3d292ea service nova] Acquiring lock "refresh_cache-d045c9ca-71f9-411e-9048-71b36c32f4b2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.592187] env[69328]: DEBUG oslo_concurrency.lockutils [req-421687f0-2682-42b0-9f23-133a24bd72c7 req-508fef02-4bbd-48d7-bed6-d003a3d292ea service nova] Acquired lock "refresh_cache-d045c9ca-71f9-411e-9048-71b36c32f4b2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 828.592448] env[69328]: DEBUG nova.network.neutron [req-421687f0-2682-42b0-9f23-133a24bd72c7 req-508fef02-4bbd-48d7-bed6-d003a3d292ea service nova] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Refreshing network info cache for port cf26672a-3aeb-4534-8776-36a45511c5b4 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 828.603013] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a15a5191-3036-4059-83a7-dcdee25dcac7 tempest-FloatingIPsAssociationTestJSON-1084095273 tempest-FloatingIPsAssociationTestJSON-1084095273-project-member] Lock "e92953f4-b634-4ef9-a5ad-63a886cfa007" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.132s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 828.764777] env[69328]: DEBUG oslo_vmware.api [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273329, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.819318] env[69328]: DEBUG nova.compute.manager [req-cbbbac35-4781-4ae5-a110-de8b425d8739 req-0829d944-1b26-43d2-9bce-90dcefd29bb0 service nova] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Received event network-vif-plugged-f3f139e6-24e0-47d6-8700-cdcaec9d0b1b {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 828.819318] env[69328]: DEBUG oslo_concurrency.lockutils [req-cbbbac35-4781-4ae5-a110-de8b425d8739 req-0829d944-1b26-43d2-9bce-90dcefd29bb0 service nova] Acquiring lock "d045c9ca-71f9-411e-9048-71b36c32f4b2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 828.819318] env[69328]: DEBUG oslo_concurrency.lockutils [req-cbbbac35-4781-4ae5-a110-de8b425d8739 req-0829d944-1b26-43d2-9bce-90dcefd29bb0 service nova] Lock "d045c9ca-71f9-411e-9048-71b36c32f4b2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 828.819318] env[69328]: DEBUG oslo_concurrency.lockutils [req-cbbbac35-4781-4ae5-a110-de8b425d8739 req-0829d944-1b26-43d2-9bce-90dcefd29bb0 service nova] Lock "d045c9ca-71f9-411e-9048-71b36c32f4b2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 828.819318] env[69328]: DEBUG nova.compute.manager [req-cbbbac35-4781-4ae5-a110-de8b425d8739 req-0829d944-1b26-43d2-9bce-90dcefd29bb0 service nova] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] No waiting events found dispatching network-vif-plugged-f3f139e6-24e0-47d6-8700-cdcaec9d0b1b {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 828.819495] env[69328]: WARNING nova.compute.manager [req-cbbbac35-4781-4ae5-a110-de8b425d8739 req-0829d944-1b26-43d2-9bce-90dcefd29bb0 service nova] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Received unexpected event network-vif-plugged-f3f139e6-24e0-47d6-8700-cdcaec9d0b1b for instance with vm_state building and task_state spawning. [ 828.889601] env[69328]: DEBUG oslo_vmware.api [None req-13da97df-4b58-47dc-82b5-0277fc90698d tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273330, 'name': PowerOffVM_Task, 'duration_secs': 0.237051} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.889890] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-13da97df-4b58-47dc-82b5-0277fc90698d tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 828.890155] env[69328]: DEBUG nova.compute.manager [None req-13da97df-4b58-47dc-82b5-0277fc90698d tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 828.890940] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15d69ef0-b223-47e9-a7b6-0c40953392f2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.948241] env[69328]: DEBUG nova.network.neutron [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Successfully updated port: f3f139e6-24e0-47d6-8700-cdcaec9d0b1b {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 829.043631] env[69328]: DEBUG oslo_vmware.api [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Task: {'id': task-3273331, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084779} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.043898] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 829.044695] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dc53e59-28ca-4bf6-89aa-a551c561eccd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.066617] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b/b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 829.066959] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a30c3f30-b45a-4057-ae88-1902df42a304 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.090915] env[69328]: DEBUG oslo_vmware.api [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Waiting for the task: (returnval){ [ 829.090915] env[69328]: value = "task-3273332" [ 829.090915] env[69328]: _type = "Task" [ 829.090915] 
env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.100854] env[69328]: DEBUG oslo_vmware.api [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Task: {'id': task-3273332, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.168276] env[69328]: DEBUG nova.network.neutron [req-421687f0-2682-42b0-9f23-133a24bd72c7 req-508fef02-4bbd-48d7-bed6-d003a3d292ea service nova] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 829.267697] env[69328]: DEBUG oslo_vmware.api [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273329, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.630056} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.267957] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 3daf7b73-5679-47ce-b847-f3786f1000d4/3daf7b73-5679-47ce-b847-f3786f1000d4.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 829.268181] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 829.268434] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-685bd34a-cc32-4c73-879c-a9ea8f832cd3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.275965] env[69328]: DEBUG oslo_vmware.api [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for the task: (returnval){ [ 829.275965] env[69328]: value = "task-3273333" [ 829.275965] env[69328]: _type = "Task" [ 829.275965] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.286904] env[69328]: DEBUG oslo_vmware.api [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273333, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.322584] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afb18f3c-b9e8-4362-9f0c-95e2858edc95 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.346044] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Updating instance '25fb207b-9388-4198-bb48-ab7cebd43375' progress to 0 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 829.350374] env[69328]: DEBUG nova.network.neutron [req-421687f0-2682-42b0-9f23-133a24bd72c7 req-508fef02-4bbd-48d7-bed6-d003a3d292ea service nova] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.407153] env[69328]: DEBUG oslo_concurrency.lockutils [None req-13da97df-4b58-47dc-82b5-0277fc90698d tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "62fa6807-f67d-4bf5-ba23-9e97f9da120e" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.057s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 829.561231] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d05a8c5-e457-4aac-ad68-c0007db6ebdd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.572018] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c18a3c5b-85f8-438d-90c6-3938045b244d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.610127] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9662c42d-ccc5-4acb-90f2-cd51d9f00379 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.621318] env[69328]: DEBUG oslo_vmware.api [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Task: {'id': task-3273332, 'name': ReconfigVM_Task, 'duration_secs': 0.307306} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.622742] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4df03b29-6e45-43a0-9ec6-a4519865f9dc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.626738] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Reconfigured VM instance instance-00000036 to attach disk [datastore1] b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b/b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 829.627376] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ce187c08-c73c-491b-a5a5-55d88edbe227 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.640371] env[69328]: DEBUG nova.compute.provider_tree [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 829.642920] env[69328]: DEBUG oslo_vmware.api [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Waiting for the task: (returnval){ [ 829.642920] env[69328]: value = "task-3273334" [ 829.642920] env[69328]: _type = "Task" [ 829.642920] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.786449] env[69328]: DEBUG oslo_vmware.api [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273333, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076532} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.787164] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 829.788130] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca3ca57e-f0ba-4705-858a-cce69fb22720 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.823188] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Reconfiguring VM instance instance-00000038 to attach disk [datastore2] 3daf7b73-5679-47ce-b847-f3786f1000d4/3daf7b73-5679-47ce-b847-f3786f1000d4.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 829.823960] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-34171cb0-5c0d-4ebd-b5b8-12cc20f997ec {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.845297] env[69328]: DEBUG oslo_vmware.api [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for the task: (returnval){ [ 829.845297] env[69328]: value = "task-3273335" [ 829.845297] env[69328]: _type = "Task" [ 829.845297] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.854730] env[69328]: DEBUG oslo_concurrency.lockutils [req-421687f0-2682-42b0-9f23-133a24bd72c7 req-508fef02-4bbd-48d7-bed6-d003a3d292ea service nova] Releasing lock "refresh_cache-d045c9ca-71f9-411e-9048-71b36c32f4b2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 829.855123] env[69328]: DEBUG oslo_vmware.api [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273335, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.856638] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 829.856935] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0e480a38-86e2-4a56-bbf7-55968a4227f2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.865616] env[69328]: DEBUG oslo_vmware.api [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Waiting for the task: (returnval){ [ 829.865616] env[69328]: value = "task-3273336" [ 829.865616] env[69328]: _type = "Task" [ 829.865616] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.875460] env[69328]: DEBUG oslo_vmware.api [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273336, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.144338] env[69328]: DEBUG nova.scheduler.client.report [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 830.160704] env[69328]: DEBUG oslo_vmware.api [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Task: {'id': task-3273334, 'name': Rename_Task, 'duration_secs': 0.18929} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.161449] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 830.161449] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dd6e70dd-8f13-4d9d-b8ec-fcb605f56406 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.171036] env[69328]: DEBUG oslo_vmware.api [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Waiting for the task: (returnval){ [ 830.171036] env[69328]: value = "task-3273337" [ 830.171036] env[69328]: _type = "Task" [ 830.171036] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.179285] env[69328]: DEBUG oslo_vmware.api [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Task: {'id': task-3273337, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.355985] env[69328]: DEBUG oslo_vmware.api [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273335, 'name': ReconfigVM_Task, 'duration_secs': 0.333817} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.356294] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Reconfigured VM instance instance-00000038 to attach disk [datastore2] 3daf7b73-5679-47ce-b847-f3786f1000d4/3daf7b73-5679-47ce-b847-f3786f1000d4.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 830.356921] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-def26666-692b-433f-8911-db557cce266a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.363972] env[69328]: DEBUG oslo_vmware.api [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for the task: (returnval){ [ 830.363972] env[69328]: value = "task-3273338" [ 830.363972] env[69328]: _type = "Task" [ 830.363972] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.376307] env[69328]: DEBUG oslo_vmware.api [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273338, 'name': Rename_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.379632] env[69328]: DEBUG oslo_vmware.api [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273336, 'name': PowerOffVM_Task, 'duration_secs': 0.244321} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.379945] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 830.380173] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Updating instance '25fb207b-9388-4198-bb48-ab7cebd43375' progress to 17 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 830.652923] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.587s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 830.653604] env[69328]: DEBUG nova.compute.manager [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 830.656059] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a1b7b68d-5e62-40b9-8791-1ad9c1de04f2 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.151s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 830.656285] env[69328]: DEBUG nova.objects.instance [None req-a1b7b68d-5e62-40b9-8791-1ad9c1de04f2 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Lazy-loading 'resources' on Instance uuid 99e31dfd-5d41-4564-886f-becc25ca289c {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 830.681014] env[69328]: DEBUG oslo_vmware.api [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Task: {'id': task-3273337, 'name': PowerOnVM_Task, 'duration_secs': 0.450176} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.681440] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 830.681682] env[69328]: DEBUG nova.compute.manager [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 830.682849] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d2fb3ef-55c4-4945-bb6b-d29476766212 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.780009] env[69328]: DEBUG nova.compute.manager [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 830.780265] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68ade28d-6ddc-4b4e-b1c4-bcaaafbb3c13 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.842900] env[69328]: DEBUG nova.compute.manager [req-bd6fc8fc-46d5-4544-bd41-9a842c39656c req-5da63ce7-f47e-4151-b451-004e8cdec98a service nova] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Received event network-changed-f3f139e6-24e0-47d6-8700-cdcaec9d0b1b {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 830.843131] env[69328]: DEBUG nova.compute.manager [req-bd6fc8fc-46d5-4544-bd41-9a842c39656c req-5da63ce7-f47e-4151-b451-004e8cdec98a service nova] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Refreshing instance network info cache due to event network-changed-f3f139e6-24e0-47d6-8700-cdcaec9d0b1b. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 830.843359] env[69328]: DEBUG oslo_concurrency.lockutils [req-bd6fc8fc-46d5-4544-bd41-9a842c39656c req-5da63ce7-f47e-4151-b451-004e8cdec98a service nova] Acquiring lock "refresh_cache-d045c9ca-71f9-411e-9048-71b36c32f4b2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.843491] env[69328]: DEBUG oslo_concurrency.lockutils [req-bd6fc8fc-46d5-4544-bd41-9a842c39656c req-5da63ce7-f47e-4151-b451-004e8cdec98a service nova] Acquired lock "refresh_cache-d045c9ca-71f9-411e-9048-71b36c32f4b2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 830.843681] env[69328]: DEBUG nova.network.neutron [req-bd6fc8fc-46d5-4544-bd41-9a842c39656c req-5da63ce7-f47e-4151-b451-004e8cdec98a service nova] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Refreshing network info cache for port f3f139e6-24e0-47d6-8700-cdcaec9d0b1b {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 830.874081] env[69328]: DEBUG oslo_vmware.api [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273338, 'name': Rename_Task, 'duration_secs': 0.166171} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.874327] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 830.874568] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d761780a-9e8f-42e0-8775-1e8597daea5f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.881993] env[69328]: DEBUG oslo_vmware.api [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for the task: (returnval){ [ 830.881993] env[69328]: value = "task-3273339" [ 830.881993] env[69328]: _type = "Task" [ 830.881993] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.887042] env[69328]: DEBUG nova.virt.hardware [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:34:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 830.887262] env[69328]: DEBUG nova.virt.hardware [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 830.888026] env[69328]: DEBUG nova.virt.hardware [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 830.888026] env[69328]: DEBUG nova.virt.hardware [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 830.888026] env[69328]: DEBUG nova.virt.hardware [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 830.888026] env[69328]: DEBUG nova.virt.hardware [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 830.888182] env[69328]: DEBUG nova.virt.hardware [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 830.888215] env[69328]: DEBUG nova.virt.hardware [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 830.888371] env[69328]: DEBUG nova.virt.hardware [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:505}} [ 830.888527] env[69328]: DEBUG nova.virt.hardware [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 830.888691] env[69328]: DEBUG nova.virt.hardware [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 830.893874] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-32173ca7-2195-4716-aab2-bde4d49586d8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.911699] env[69328]: DEBUG oslo_vmware.api [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273339, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.913445] env[69328]: DEBUG oslo_vmware.api [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Waiting for the task: (returnval){ [ 830.913445] env[69328]: value = "task-3273340" [ 830.913445] env[69328]: _type = "Task" [ 830.913445] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.922857] env[69328]: DEBUG oslo_vmware.api [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273340, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.122865] env[69328]: DEBUG nova.network.neutron [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Successfully updated port: 046427f3-7078-436b-be68-5df86aa70395 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 831.162375] env[69328]: DEBUG nova.compute.utils [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 831.163202] env[69328]: DEBUG nova.compute.manager [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Not allocating networking since 'none' was specified. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 831.203406] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 831.293972] env[69328]: INFO nova.compute.manager [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] instance snapshotting [ 831.293972] env[69328]: WARNING nova.compute.manager [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 831.296144] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f268cf43-633b-4d60-b093-3e833a91f6dd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.321384] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cbe0464-0f82-4c07-86c7-2f276b1cfccf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.400684] env[69328]: DEBUG oslo_vmware.api [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273339, 'name': PowerOnVM_Task, 'duration_secs': 0.484207} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.401784] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 831.401784] env[69328]: INFO nova.compute.manager [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Took 8.54 seconds to spawn the instance on the hypervisor. 
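The repeated "Waiting for the task: (returnval){ ... } to complete", "progress is N%" and "completed successfully" records around the Rename_Task, PowerOnVM_Task and ReconfigVM_Task calls above are oslo.vmware's standard task-polling loop (wait_for_task / _poll_task). A minimal sketch of that pattern, assuming placeholder vCenter credentials and a placeholder VM managed-object reference rather than anything taken from this log, and not the exact Nova call path:

    # Sketch only: placeholder host, credentials and moref. oslo.vmware's
    # wait_for_task() performs the same polling seen in the records above.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vc.example.test', 'user@example.test', 'secret',   # placeholders
        api_retry_count=10, task_poll_interval=0.5)

    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')  # placeholder id
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)  # polls and logs progress until the task ends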
[ 831.401784] env[69328]: DEBUG nova.compute.manager [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 831.402287] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b95df65-7df4-466c-9a67-72da11aedfd2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.414030] env[69328]: DEBUG nova.network.neutron [req-bd6fc8fc-46d5-4544-bd41-9a842c39656c req-5da63ce7-f47e-4151-b451-004e8cdec98a service nova] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 831.432586] env[69328]: DEBUG oslo_vmware.api [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273340, 'name': ReconfigVM_Task, 'duration_secs': 0.242268} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.437027] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Updating instance '25fb207b-9388-4198-bb48-ab7cebd43375' progress to 33 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 831.560583] env[69328]: DEBUG nova.network.neutron [req-bd6fc8fc-46d5-4544-bd41-9a842c39656c req-5da63ce7-f47e-4151-b451-004e8cdec98a service nova] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.630566] env[69328]: DEBUG oslo_concurrency.lockutils [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Acquiring lock "refresh_cache-d045c9ca-71f9-411e-9048-71b36c32f4b2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.665128] env[69328]: DEBUG nova.compute.manager [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Start building block device mappings for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 831.689650] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dc9d2e1-f0e6-429b-a33a-dc9c2682937c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.700937] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3312b604-a617-42f7-982f-b96a0b330548 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.735204] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bbd5ac0-dc3c-402c-a495-654cc019872e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.744445] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9144eafd-1960-48b6-ad5b-1e67417a2c8b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.759616] env[69328]: DEBUG nova.compute.provider_tree [None req-a1b7b68d-5e62-40b9-8791-1ad9c1de04f2 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 831.836240] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Creating Snapshot of the VM instance {{(pid=69328) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 831.836605] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-abb6c029-eca0-41e9-8f01-1788c39f44b9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.845421] env[69328]: DEBUG oslo_vmware.api [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 831.845421] env[69328]: value = "task-3273341" [ 831.845421] env[69328]: _type = "Task" [ 831.845421] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.854025] env[69328]: DEBUG oslo_vmware.api [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273341, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.926117] env[69328]: INFO nova.compute.manager [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Took 44.17 seconds to build instance. 
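The scheduler report client and provider-tree records above ("Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: ..." and "Inventory has not changed in ProviderTree for provider ...") carry the per-resource-class inventory that Placement checks allocations against. Usable capacity per class is (total - reserved) * allocation_ratio, so the inventory logged earlier works out to 192 consumable VCPU, 196078 MB of RAM and 400 GB of disk. A small illustrative sketch of that arithmetic over the logged inventory, trimmed to the fields it uses (the helper is not a Nova or Placement API):

    # Illustrative only: recompute usable capacity from the inventory dict
    # logged by nova.scheduler.client.report (min_unit/max_unit/step_size omitted).
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def usable(inv):
        # Placement treats (total - reserved) * allocation_ratio as the
        # capacity available for allocations of a resource class.
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    print(usable(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}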
[ 831.945051] env[69328]: DEBUG nova.virt.hardware [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:37:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='32e26fb7-ec83-4d85-ade8-a07c889bcc21',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-718846255',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 831.945051] env[69328]: DEBUG nova.virt.hardware [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 831.945051] env[69328]: DEBUG nova.virt.hardware [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 831.945051] env[69328]: DEBUG nova.virt.hardware [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 831.945489] env[69328]: DEBUG nova.virt.hardware [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 831.945489] env[69328]: DEBUG nova.virt.hardware [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 831.945926] env[69328]: DEBUG nova.virt.hardware [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 831.946314] env[69328]: DEBUG nova.virt.hardware [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 831.946612] env[69328]: DEBUG nova.virt.hardware [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 
831.946895] env[69328]: DEBUG nova.virt.hardware [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 831.947204] env[69328]: DEBUG nova.virt.hardware [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 831.952965] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Reconfiguring VM instance instance-0000002f to detach disk 2000 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 831.953672] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7d4dfdb5-0dfb-4800-a3cc-0572eb58df24 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.978023] env[69328]: DEBUG oslo_vmware.api [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Waiting for the task: (returnval){ [ 831.978023] env[69328]: value = "task-3273342" [ 831.978023] env[69328]: _type = "Task" [ 831.978023] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.986195] env[69328]: DEBUG oslo_vmware.api [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273342, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.064267] env[69328]: DEBUG oslo_concurrency.lockutils [req-bd6fc8fc-46d5-4544-bd41-9a842c39656c req-5da63ce7-f47e-4151-b451-004e8cdec98a service nova] Releasing lock "refresh_cache-d045c9ca-71f9-411e-9048-71b36c32f4b2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 832.065048] env[69328]: DEBUG oslo_concurrency.lockutils [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Acquired lock "refresh_cache-d045c9ca-71f9-411e-9048-71b36c32f4b2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 832.065627] env[69328]: DEBUG nova.network.neutron [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 832.115695] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Acquiring lock "b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 832.115695] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Lock "b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 832.115695] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Acquiring lock "b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 832.115695] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Lock "b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 832.116252] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Lock "b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 832.116864] env[69328]: INFO nova.compute.manager [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e 
tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Terminating instance [ 832.263231] env[69328]: DEBUG nova.scheduler.client.report [None req-a1b7b68d-5e62-40b9-8791-1ad9c1de04f2 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 832.358056] env[69328]: DEBUG oslo_vmware.api [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273341, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.427931] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ef65691d-9471-4b07-af1f-3085d66114d7 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Lock "3daf7b73-5679-47ce-b847-f3786f1000d4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.043s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 832.489241] env[69328]: DEBUG oslo_vmware.api [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273342, 'name': ReconfigVM_Task, 'duration_secs': 0.205096} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.489646] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Reconfigured VM instance instance-0000002f to detach disk 2000 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 832.490485] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46102bb9-bbac-4c87-adbf-b0926e564417 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.516451] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] 25fb207b-9388-4198-bb48-ab7cebd43375/25fb207b-9388-4198-bb48-ab7cebd43375.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 832.516772] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f640ac80-5166-4a61-b4db-56def3b7f78e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.535612] env[69328]: DEBUG oslo_vmware.api [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Waiting for the task: (returnval){ [ 832.535612] env[69328]: value = "task-3273343" [ 832.535612] env[69328]: _type = "Task" [ 832.535612] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.545026] env[69328]: DEBUG oslo_vmware.api [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273343, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.620833] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Acquiring lock "refresh_cache-b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.621036] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Acquired lock "refresh_cache-b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 832.621285] env[69328]: DEBUG nova.network.neutron [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 832.627769] env[69328]: DEBUG nova.network.neutron [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 832.676286] env[69328]: DEBUG nova.compute.manager [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 832.703716] env[69328]: DEBUG nova.virt.hardware [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 832.703979] env[69328]: DEBUG nova.virt.hardware [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 832.704149] env[69328]: DEBUG nova.virt.hardware [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 832.707564] env[69328]: DEBUG nova.virt.hardware [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 832.707564] env[69328]: DEBUG nova.virt.hardware [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 832.707564] env[69328]: DEBUG nova.virt.hardware [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 832.707564] env[69328]: DEBUG nova.virt.hardware [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 832.707564] env[69328]: DEBUG nova.virt.hardware [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 832.707776] env[69328]: DEBUG nova.virt.hardware [None 
req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 832.707776] env[69328]: DEBUG nova.virt.hardware [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 832.707776] env[69328]: DEBUG nova.virt.hardware [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 832.707776] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30314761-41f2-49e5-9734-01912393c319 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.718040] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe1ca996-7a9e-4daa-8d35-bc526545c8f1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.733316] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Instance VIF info [] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 832.739449] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Creating folder: Project (af007c3a4165487d9fdf5d7a758a3327). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 832.739765] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0bdb3044-8869-461e-a0b4-eed9cce53b99 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.755995] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Created folder: Project (af007c3a4165487d9fdf5d7a758a3327) in parent group-v653649. [ 832.755995] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Creating folder: Instances. Parent ref: group-v653812. 
{{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 832.755995] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b0729225-c062-4786-b546-f9fef78f5b55 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.768180] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Created folder: Instances in parent group-v653812. [ 832.768180] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 832.768463] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 832.769021] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b76ab8ff-e716-4bf4-8e69-054f9869aca3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.783493] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a1b7b68d-5e62-40b9-8791-1ad9c1de04f2 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.126s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 832.784133] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.718s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 832.785584] env[69328]: INFO nova.compute.claims [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 832.794493] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 832.794493] env[69328]: value = "task-3273346" [ 832.794493] env[69328]: _type = "Task" [ 832.794493] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.809804] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273346, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.828043] env[69328]: INFO nova.scheduler.client.report [None req-a1b7b68d-5e62-40b9-8791-1ad9c1de04f2 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Deleted allocations for instance 99e31dfd-5d41-4564-886f-becc25ca289c [ 832.858656] env[69328]: DEBUG oslo_vmware.api [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273341, 'name': CreateSnapshot_Task, 'duration_secs': 0.820489} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.862474] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Created Snapshot of the VM instance {{(pid=69328) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 832.863604] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47a90bc8-85a9-4048-9e7f-c80fb09a8fb7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.944161] env[69328]: DEBUG nova.compute.manager [req-7451ab56-af54-44cd-ba06-84b0291738fd req-0d473d0d-3875-47a0-879f-be91f8856558 service nova] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Received event network-vif-plugged-046427f3-7078-436b-be68-5df86aa70395 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 832.944161] env[69328]: DEBUG oslo_concurrency.lockutils [req-7451ab56-af54-44cd-ba06-84b0291738fd req-0d473d0d-3875-47a0-879f-be91f8856558 service nova] Acquiring lock "d045c9ca-71f9-411e-9048-71b36c32f4b2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 832.944161] env[69328]: DEBUG oslo_concurrency.lockutils [req-7451ab56-af54-44cd-ba06-84b0291738fd req-0d473d0d-3875-47a0-879f-be91f8856558 service nova] Lock "d045c9ca-71f9-411e-9048-71b36c32f4b2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 832.944161] env[69328]: DEBUG oslo_concurrency.lockutils [req-7451ab56-af54-44cd-ba06-84b0291738fd req-0d473d0d-3875-47a0-879f-be91f8856558 service nova] Lock "d045c9ca-71f9-411e-9048-71b36c32f4b2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 832.944161] env[69328]: DEBUG nova.compute.manager [req-7451ab56-af54-44cd-ba06-84b0291738fd req-0d473d0d-3875-47a0-879f-be91f8856558 service nova] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] No waiting events found dispatching network-vif-plugged-046427f3-7078-436b-be68-5df86aa70395 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 832.944441] env[69328]: WARNING nova.compute.manager [req-7451ab56-af54-44cd-ba06-84b0291738fd req-0d473d0d-3875-47a0-879f-be91f8856558 service nova] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Received unexpected 
event network-vif-plugged-046427f3-7078-436b-be68-5df86aa70395 for instance with vm_state building and task_state spawning. [ 832.945604] env[69328]: DEBUG nova.compute.manager [req-7451ab56-af54-44cd-ba06-84b0291738fd req-0d473d0d-3875-47a0-879f-be91f8856558 service nova] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Received event network-changed-046427f3-7078-436b-be68-5df86aa70395 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 832.946153] env[69328]: DEBUG nova.compute.manager [req-7451ab56-af54-44cd-ba06-84b0291738fd req-0d473d0d-3875-47a0-879f-be91f8856558 service nova] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Refreshing instance network info cache due to event network-changed-046427f3-7078-436b-be68-5df86aa70395. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 832.946468] env[69328]: DEBUG oslo_concurrency.lockutils [req-7451ab56-af54-44cd-ba06-84b0291738fd req-0d473d0d-3875-47a0-879f-be91f8856558 service nova] Acquiring lock "refresh_cache-d045c9ca-71f9-411e-9048-71b36c32f4b2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.048912] env[69328]: DEBUG oslo_vmware.api [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273343, 'name': ReconfigVM_Task, 'duration_secs': 0.28505} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.049258] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Reconfigured VM instance instance-0000002f to attach disk [datastore1] 25fb207b-9388-4198-bb48-ab7cebd43375/25fb207b-9388-4198-bb48-ab7cebd43375.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 833.049623] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Updating instance '25fb207b-9388-4198-bb48-ab7cebd43375' progress to 50 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 833.151655] env[69328]: DEBUG nova.network.neutron [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Updating instance_info_cache with network_info: [{"id": "cf26672a-3aeb-4534-8776-36a45511c5b4", "address": "fa:16:3e:6d:84:f2", "network": {"id": "f42dac65-89fd-461c-a160-43aaa738a5e3", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-710236275", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.189", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f71a072b33154efe9636b50e25f93381", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": 
true, "nsx-logical-switch-id": "bd3c6b64-aba2-4bdc-a693-3b4dff3ed861", "external-id": "nsx-vlan-transportzone-600", "segmentation_id": 600, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf26672a-3a", "ovs_interfaceid": "cf26672a-3aeb-4534-8776-36a45511c5b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f3f139e6-24e0-47d6-8700-cdcaec9d0b1b", "address": "fa:16:3e:f4:9e:82", "network": {"id": "dd10b920-6fde-40af-85b6-0d58265d3255", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1828541594", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.161", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "f71a072b33154efe9636b50e25f93381", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e365f3b9-706b-4fa2-8f95-ae51b35ab011", "external-id": "nsx-vlan-transportzone-154", "segmentation_id": 154, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3f139e6-24", "ovs_interfaceid": "f3f139e6-24e0-47d6-8700-cdcaec9d0b1b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "046427f3-7078-436b-be68-5df86aa70395", "address": "fa:16:3e:0a:43:8a", "network": {"id": "f42dac65-89fd-461c-a160-43aaa738a5e3", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-710236275", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.130", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f71a072b33154efe9636b50e25f93381", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bd3c6b64-aba2-4bdc-a693-3b4dff3ed861", "external-id": "nsx-vlan-transportzone-600", "segmentation_id": 600, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap046427f3-70", "ovs_interfaceid": "046427f3-7078-436b-be68-5df86aa70395", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 833.163153] env[69328]: DEBUG nova.network.neutron [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 833.257524] env[69328]: DEBUG nova.network.neutron [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 833.306057] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273346, 'name': CreateVM_Task, 'duration_secs': 0.412985} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.306412] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 833.306902] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.307100] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 833.307627] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 833.307735] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6df337a7-e5cf-4dcc-a48e-6111b2a85847 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.315555] env[69328]: DEBUG oslo_vmware.api [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Waiting for the task: (returnval){ [ 833.315555] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]526639b3-f8e2-db45-e765-4a42445b3bd0" [ 833.315555] env[69328]: _type = "Task" [ 833.315555] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.326046] env[69328]: DEBUG oslo_vmware.api [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]526639b3-f8e2-db45-e765-4a42445b3bd0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.340060] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a1b7b68d-5e62-40b9-8791-1ad9c1de04f2 tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Lock "99e31dfd-5d41-4564-886f-becc25ca289c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.404s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 833.390652] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Creating linked-clone VM from snapshot {{(pid=69328) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 833.391547] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-3109a378-a7d3-43f6-8ea9-72600aa92531 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.403300] env[69328]: DEBUG oslo_vmware.api [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 833.403300] env[69328]: value = "task-3273347" [ 833.403300] env[69328]: _type = "Task" [ 833.403300] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.419327] env[69328]: DEBUG oslo_vmware.api [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273347, 'name': CloneVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.561182] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89b2f714-0a76-44a1-9c21-cbbbda585acd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.583348] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c364be18-d5a8-4ff7-8138-e02cae060ee1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.605319] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Updating instance '25fb207b-9388-4198-bb48-ab7cebd43375' progress to 67 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 833.654548] env[69328]: DEBUG oslo_concurrency.lockutils [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Releasing lock "refresh_cache-d045c9ca-71f9-411e-9048-71b36c32f4b2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 833.655612] env[69328]: DEBUG nova.compute.manager [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Instance network_info: |[{"id": "cf26672a-3aeb-4534-8776-36a45511c5b4", "address": "fa:16:3e:6d:84:f2", "network": {"id": "f42dac65-89fd-461c-a160-43aaa738a5e3", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-710236275", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.189", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f71a072b33154efe9636b50e25f93381", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bd3c6b64-aba2-4bdc-a693-3b4dff3ed861", "external-id": "nsx-vlan-transportzone-600", "segmentation_id": 600, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf26672a-3a", "ovs_interfaceid": "cf26672a-3aeb-4534-8776-36a45511c5b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f3f139e6-24e0-47d6-8700-cdcaec9d0b1b", "address": "fa:16:3e:f4:9e:82", "network": {"id": "dd10b920-6fde-40af-85b6-0d58265d3255", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1828541594", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.161", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "f71a072b33154efe9636b50e25f93381", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": 
"ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e365f3b9-706b-4fa2-8f95-ae51b35ab011", "external-id": "nsx-vlan-transportzone-154", "segmentation_id": 154, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3f139e6-24", "ovs_interfaceid": "f3f139e6-24e0-47d6-8700-cdcaec9d0b1b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "046427f3-7078-436b-be68-5df86aa70395", "address": "fa:16:3e:0a:43:8a", "network": {"id": "f42dac65-89fd-461c-a160-43aaa738a5e3", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-710236275", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.130", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f71a072b33154efe9636b50e25f93381", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bd3c6b64-aba2-4bdc-a693-3b4dff3ed861", "external-id": "nsx-vlan-transportzone-600", "segmentation_id": 600, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap046427f3-70", "ovs_interfaceid": "046427f3-7078-436b-be68-5df86aa70395", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 833.655769] env[69328]: DEBUG oslo_concurrency.lockutils [req-7451ab56-af54-44cd-ba06-84b0291738fd req-0d473d0d-3875-47a0-879f-be91f8856558 service nova] Acquired lock "refresh_cache-d045c9ca-71f9-411e-9048-71b36c32f4b2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 833.655917] env[69328]: DEBUG nova.network.neutron [req-7451ab56-af54-44cd-ba06-84b0291738fd req-0d473d0d-3875-47a0-879f-be91f8856558 service nova] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Refreshing network info cache for port 046427f3-7078-436b-be68-5df86aa70395 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 833.657106] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6d:84:f2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bd3c6b64-aba2-4bdc-a693-3b4dff3ed861', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cf26672a-3aeb-4534-8776-36a45511c5b4', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:9e:82', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e365f3b9-706b-4fa2-8f95-ae51b35ab011', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f3f139e6-24e0-47d6-8700-cdcaec9d0b1b', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:0a:43:8a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bd3c6b64-aba2-4bdc-a693-3b4dff3ed861', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'046427f3-7078-436b-be68-5df86aa70395', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 833.668791] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Creating folder: Project (f71a072b33154efe9636b50e25f93381). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 833.672357] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ab4add5d-85ce-4c78-a250-bbaf8812a24f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.687406] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Created folder: Project (f71a072b33154efe9636b50e25f93381) in parent group-v653649. [ 833.687607] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Creating folder: Instances. Parent ref: group-v653816. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 833.687846] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2ed5513d-95b5-4025-a288-8c1d3eca98f7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.699281] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Created folder: Instances in parent group-v653816. [ 833.699281] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 833.699281] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 833.699281] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-38bc4c4a-f51a-4f79-bad7-aadb959204f8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.726936] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 833.726936] env[69328]: value = "task-3273350" [ 833.726936] env[69328]: _type = "Task" [ 833.726936] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.737350] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273350, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.760668] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Releasing lock "refresh_cache-b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 833.761149] env[69328]: DEBUG nova.compute.manager [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 833.761408] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 833.762345] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03b58547-df0d-4f55-b5be-2b629130e540 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.772531] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 833.772809] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8a195df9-1b3c-454d-bd8a-a93ac3f0abc4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.781021] env[69328]: DEBUG oslo_vmware.api [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Waiting for the task: (returnval){ [ 833.781021] env[69328]: value = "task-3273351" [ 833.781021] env[69328]: _type = "Task" [ 833.781021] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.789087] env[69328]: DEBUG oslo_vmware.api [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Task: {'id': task-3273351, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.834824] env[69328]: DEBUG oslo_vmware.api [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]526639b3-f8e2-db45-e765-4a42445b3bd0, 'name': SearchDatastore_Task, 'duration_secs': 0.01212} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.838087] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 833.838397] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 833.838701] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.838881] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 833.839262] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 833.839632] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7e7a3b43-1a3b-4907-b9ca-65f19a9921c1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.850121] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 833.850235] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 833.851065] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9fc62d34-b1d5-4502-82ea-0cb74f7dd542 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.863857] env[69328]: DEBUG oslo_vmware.api [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Waiting for the task: (returnval){ [ 833.863857] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]528f27ca-9557-0e86-837e-2c3a003fa7f0" [ 833.863857] env[69328]: _type = "Task" [ 833.863857] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.873298] env[69328]: DEBUG oslo_vmware.api [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]528f27ca-9557-0e86-837e-2c3a003fa7f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.918991] env[69328]: DEBUG oslo_vmware.api [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273347, 'name': CloneVM_Task} progress is 94%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.999323] env[69328]: DEBUG nova.network.neutron [req-7451ab56-af54-44cd-ba06-84b0291738fd req-0d473d0d-3875-47a0-879f-be91f8856558 service nova] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Updated VIF entry in instance network info cache for port 046427f3-7078-436b-be68-5df86aa70395. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 833.999593] env[69328]: DEBUG nova.network.neutron [req-7451ab56-af54-44cd-ba06-84b0291738fd req-0d473d0d-3875-47a0-879f-be91f8856558 service nova] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Updating instance_info_cache with network_info: [{"id": "cf26672a-3aeb-4534-8776-36a45511c5b4", "address": "fa:16:3e:6d:84:f2", "network": {"id": "f42dac65-89fd-461c-a160-43aaa738a5e3", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-710236275", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.189", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f71a072b33154efe9636b50e25f93381", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bd3c6b64-aba2-4bdc-a693-3b4dff3ed861", "external-id": "nsx-vlan-transportzone-600", "segmentation_id": 600, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf26672a-3a", "ovs_interfaceid": "cf26672a-3aeb-4534-8776-36a45511c5b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f3f139e6-24e0-47d6-8700-cdcaec9d0b1b", "address": "fa:16:3e:f4:9e:82", "network": {"id": "dd10b920-6fde-40af-85b6-0d58265d3255", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1828541594", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.161", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "f71a072b33154efe9636b50e25f93381", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e365f3b9-706b-4fa2-8f95-ae51b35ab011", "external-id": "nsx-vlan-transportzone-154", "segmentation_id": 154, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3f139e6-24", "ovs_interfaceid": "f3f139e6-24e0-47d6-8700-cdcaec9d0b1b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "046427f3-7078-436b-be68-5df86aa70395", "address": "fa:16:3e:0a:43:8a", "network": {"id": "f42dac65-89fd-461c-a160-43aaa738a5e3", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-710236275", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.130", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f71a072b33154efe9636b50e25f93381", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bd3c6b64-aba2-4bdc-a693-3b4dff3ed861", "external-id": "nsx-vlan-transportzone-600", 
"segmentation_id": 600, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap046427f3-70", "ovs_interfaceid": "046427f3-7078-436b-be68-5df86aa70395", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.164033] env[69328]: DEBUG nova.network.neutron [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Port 32db9785-1822-4acf-9971-06db92f35c18 binding to destination host cpu-1 is already ACTIVE {{(pid=69328) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 834.239289] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273350, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.294880] env[69328]: DEBUG oslo_vmware.api [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Task: {'id': task-3273351, 'name': PowerOffVM_Task, 'duration_secs': 0.144829} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.295341] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 834.295554] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 834.295924] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a88ec4ee-9620-481a-bfd8-497194c86b22 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.317271] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95029be0-5efd-4f2d-b54e-3fb16184d59b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.327238] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a984c9a-666b-415a-abc2-63dff90ccc31 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.334347] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 834.334570] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Deleting contents of the VM 
from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 834.334812] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Deleting the datastore file [datastore1] b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 834.335017] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a4d6faf5-e7af-43b6-a129-06cc93860c5d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.376381] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc9756b7-b046-43c3-9ecf-82df33f71be3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.381618] env[69328]: DEBUG oslo_vmware.api [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Waiting for the task: (returnval){ [ 834.381618] env[69328]: value = "task-3273353" [ 834.381618] env[69328]: _type = "Task" [ 834.381618] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.390814] env[69328]: DEBUG oslo_vmware.api [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]528f27ca-9557-0e86-837e-2c3a003fa7f0, 'name': SearchDatastore_Task, 'duration_secs': 0.013132} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.393061] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bb9c739-d5d4-475a-8109-240ba297d08a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.399996] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b198f7c8-d170-4244-b6bc-b6d64c24eabf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.402347] env[69328]: DEBUG oslo_concurrency.lockutils [None req-970b7cc7-0a5b-4e6c-8853-38e9d5f3f5be tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Acquiring lock "15a8de08-4d20-4329-9867-53e5dff82878" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 834.402600] env[69328]: DEBUG oslo_concurrency.lockutils [None req-970b7cc7-0a5b-4e6c-8853-38e9d5f3f5be tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Lock "15a8de08-4d20-4329-9867-53e5dff82878" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 834.402810] env[69328]: DEBUG oslo_concurrency.lockutils [None req-970b7cc7-0a5b-4e6c-8853-38e9d5f3f5be tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Acquiring lock "15a8de08-4d20-4329-9867-53e5dff82878-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 834.402995] env[69328]: DEBUG oslo_concurrency.lockutils [None req-970b7cc7-0a5b-4e6c-8853-38e9d5f3f5be tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Lock "15a8de08-4d20-4329-9867-53e5dff82878-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 834.403177] env[69328]: DEBUG oslo_concurrency.lockutils [None req-970b7cc7-0a5b-4e6c-8853-38e9d5f3f5be tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Lock "15a8de08-4d20-4329-9867-53e5dff82878-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 834.404739] env[69328]: DEBUG oslo_vmware.api [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Task: {'id': task-3273353, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.409025] env[69328]: INFO nova.compute.manager [None req-970b7cc7-0a5b-4e6c-8853-38e9d5f3f5be tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Terminating instance [ 834.413324] env[69328]: DEBUG oslo_vmware.api [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Waiting for the task: (returnval){ [ 834.413324] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]528b8ce6-dc26-f9f8-a4ff-88866bb8e737" [ 834.413324] env[69328]: _type = "Task" [ 834.413324] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.426971] env[69328]: DEBUG nova.compute.manager [None req-d43f8c96-3d11-438c-86df-2d641f9b6a9e tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 834.426971] env[69328]: DEBUG nova.compute.provider_tree [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 834.434619] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49ed0c9a-7bd0-4ee6-a78c-f37d90218e04 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.439680] env[69328]: DEBUG oslo_vmware.api [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273347, 'name': CloneVM_Task} progress is 94%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.440649] env[69328]: DEBUG nova.scheduler.client.report [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 834.456144] env[69328]: DEBUG oslo_vmware.api [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]528b8ce6-dc26-f9f8-a4ff-88866bb8e737, 'name': SearchDatastore_Task, 'duration_secs': 0.018022} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.456958] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 834.456958] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 4a990411-16cd-4e53-9068-29654b69abe6/4a990411-16cd-4e53-9068-29654b69abe6.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 834.457423] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f1104c74-8355-47dd-a00e-a07cf1fbc52e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.467841] env[69328]: DEBUG oslo_vmware.api [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Waiting for the task: (returnval){ [ 834.467841] env[69328]: value = "task-3273354" [ 834.467841] env[69328]: _type = "Task" [ 834.467841] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.477375] env[69328]: DEBUG oslo_vmware.api [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': task-3273354, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.502742] env[69328]: DEBUG oslo_concurrency.lockutils [req-7451ab56-af54-44cd-ba06-84b0291738fd req-0d473d0d-3875-47a0-879f-be91f8856558 service nova] Releasing lock "refresh_cache-d045c9ca-71f9-411e-9048-71b36c32f4b2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 834.742044] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273350, 'name': CreateVM_Task, 'duration_secs': 0.666279} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.742044] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 834.742044] env[69328]: DEBUG oslo_concurrency.lockutils [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.742044] env[69328]: DEBUG oslo_concurrency.lockutils [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 834.742044] env[69328]: DEBUG oslo_concurrency.lockutils [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 834.742044] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb13d3cc-ffab-471f-82ce-c6d5aba7f871 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.746609] env[69328]: DEBUG oslo_vmware.api [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Waiting for the task: (returnval){ [ 834.746609] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]522d6adf-bb9e-34d3-4373-d1ab8fd1fb34" [ 834.746609] env[69328]: _type = "Task" [ 834.746609] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.756610] env[69328]: DEBUG oslo_vmware.api [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]522d6adf-bb9e-34d3-4373-d1ab8fd1fb34, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.893009] env[69328]: DEBUG oslo_vmware.api [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Task: {'id': task-3273353, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159302} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.894126] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 834.894126] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 834.894126] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 834.894126] env[69328]: INFO nova.compute.manager [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Took 1.13 seconds to destroy the instance on the hypervisor. [ 834.894126] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 834.894340] env[69328]: DEBUG nova.compute.manager [-] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 834.894340] env[69328]: DEBUG nova.network.neutron [-] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 834.916338] env[69328]: DEBUG nova.compute.manager [None req-970b7cc7-0a5b-4e6c-8853-38e9d5f3f5be tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 834.916562] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-970b7cc7-0a5b-4e6c-8853-38e9d5f3f5be tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 834.917228] env[69328]: DEBUG oslo_vmware.api [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273347, 'name': CloneVM_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.917801] env[69328]: DEBUG nova.network.neutron [-] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 834.919634] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63a329ac-fb19-4004-a587-68d15f1111e2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.931189] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-970b7cc7-0a5b-4e6c-8853-38e9d5f3f5be tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 834.931189] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eb3c96a8-e8d4-46a7-b390-cb20ded1e0c0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.938565] env[69328]: DEBUG oslo_vmware.api [None req-970b7cc7-0a5b-4e6c-8853-38e9d5f3f5be tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for the task: (returnval){ [ 834.938565] env[69328]: value = "task-3273355" [ 834.938565] env[69328]: _type = "Task" [ 834.938565] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.946136] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.162s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 834.946794] env[69328]: DEBUG nova.compute.manager [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 834.954215] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.257s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 834.957980] env[69328]: INFO nova.compute.claims [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 834.959598] env[69328]: DEBUG oslo_vmware.api [None req-970b7cc7-0a5b-4e6c-8853-38e9d5f3f5be tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3273355, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.961067] env[69328]: INFO nova.compute.manager [None req-d43f8c96-3d11-438c-86df-2d641f9b6a9e tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] instance snapshotting [ 834.963949] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1f26b8d-3b3e-4824-b62f-baec0578d0be {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.995140] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-674ff019-2574-475d-af80-4c8d0f5a1c38 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.997218] env[69328]: DEBUG oslo_vmware.api [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': task-3273354, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.192709] env[69328]: DEBUG oslo_concurrency.lockutils [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquiring lock "25fb207b-9388-4198-bb48-ab7cebd43375-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 835.193097] env[69328]: DEBUG oslo_concurrency.lockutils [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Lock "25fb207b-9388-4198-bb48-ab7cebd43375-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 835.193634] env[69328]: DEBUG oslo_concurrency.lockutils [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Lock "25fb207b-9388-4198-bb48-ab7cebd43375-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 835.259294] env[69328]: DEBUG oslo_vmware.api [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]522d6adf-bb9e-34d3-4373-d1ab8fd1fb34, 'name': SearchDatastore_Task, 'duration_secs': 0.022141} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.259762] env[69328]: DEBUG oslo_concurrency.lockutils [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 835.260142] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 835.260482] env[69328]: DEBUG oslo_concurrency.lockutils [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.260803] env[69328]: DEBUG oslo_concurrency.lockutils [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 835.261602] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 835.261971] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7eb3f04b-a9e1-4379-a5ed-613ade5151bb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.274834] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 835.275207] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 835.276191] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d7b03a7-4770-4e8f-ab8f-3cf705ebff5c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.282393] env[69328]: DEBUG oslo_vmware.api [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Waiting for the task: (returnval){ [ 835.282393] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a51524-a6d3-fe82-79d1-3affd8049b46" [ 835.282393] env[69328]: _type = "Task" [ 835.282393] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.292613] env[69328]: DEBUG oslo_vmware.api [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a51524-a6d3-fe82-79d1-3affd8049b46, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.415889] env[69328]: DEBUG oslo_vmware.api [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273347, 'name': CloneVM_Task, 'duration_secs': 1.635677} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.416568] env[69328]: INFO nova.virt.vmwareapi.vmops [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Created linked-clone VM from snapshot [ 835.417875] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c0d9fd9-1f4e-496f-bbdf-dc373bca3c5d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.426572] env[69328]: DEBUG nova.network.neutron [-] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.429462] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Uploading image f10da8bc-00a0-4667-ab61-f221ad2fbf2e {{(pid=69328) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 835.449501] env[69328]: DEBUG oslo_vmware.api [None req-970b7cc7-0a5b-4e6c-8853-38e9d5f3f5be tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3273355, 'name': PowerOffVM_Task, 'duration_secs': 0.291648} completed successfully. 
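The repeated "Waiting for the task ... progress is N% ... completed successfully" entries around here come from polling the vCenter task object until it reports a terminal state. A schematic version of that poll loop; get_task_info stands in for the PropertyCollector read the real code performs, and TaskFailed is a made-up exception name:

    import time

    class TaskFailed(Exception):
        """Raised when the polled task ends in an error state (made-up name)."""

    def wait_for_task(get_task_info, task, interval=0.5):
        """Poll a vCenter-style task until it reports success or error."""
        start = time.monotonic()
        while True:
            info = get_task_info(task)            # stands in for one properties read
            if info["state"] == "success":
                return time.monotonic() - start   # the duration_secs seen in the log
            if info["state"] == "error":
                raise TaskFailed(info.get("error", "unknown error"))
            print(f"Task {task}: progress is {info.get('progress', 0)}%")
            time.sleep(interval)

    # usage: wait_for_task(lambda t: {"state": "success"}, "task-3273355")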
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.449759] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-970b7cc7-0a5b-4e6c-8853-38e9d5f3f5be tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 835.449939] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-970b7cc7-0a5b-4e6c-8853-38e9d5f3f5be tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 835.450173] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-af9ad832-6e90-467d-bdf1-339b1d12a6c0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.454608] env[69328]: DEBUG oslo_vmware.rw_handles [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 835.454608] env[69328]: value = "vm-653815" [ 835.454608] env[69328]: _type = "VirtualMachine" [ 835.454608] env[69328]: }. {{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 835.454838] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-3dff7810-2025-4b94-b595-b4638b906168 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.464453] env[69328]: DEBUG nova.compute.utils [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 835.466222] env[69328]: DEBUG oslo_vmware.rw_handles [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lease: (returnval){ [ 835.466222] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]529c09e3-322e-be9e-5da5-74193f56e956" [ 835.466222] env[69328]: _type = "HttpNfcLease" [ 835.466222] env[69328]: } obtained for exporting VM: (result){ [ 835.466222] env[69328]: value = "vm-653815" [ 835.466222] env[69328]: _type = "VirtualMachine" [ 835.466222] env[69328]: }. {{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 835.466429] env[69328]: DEBUG oslo_vmware.api [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the lease: (returnval){ [ 835.466429] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]529c09e3-322e-be9e-5da5-74193f56e956" [ 835.466429] env[69328]: _type = "HttpNfcLease" [ 835.466429] env[69328]: } to be ready. 
{{(pid=69328) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 835.467047] env[69328]: DEBUG nova.compute.manager [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 835.467217] env[69328]: DEBUG nova.network.neutron [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 835.483422] env[69328]: DEBUG oslo_vmware.api [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': task-3273354, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.633495} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.485334] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 4a990411-16cd-4e53-9068-29654b69abe6/4a990411-16cd-4e53-9068-29654b69abe6.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 835.485443] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 835.486332] env[69328]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 835.486332] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]529c09e3-322e-be9e-5da5-74193f56e956" [ 835.486332] env[69328]: _type = "HttpNfcLease" [ 835.486332] env[69328]: } is ready. {{(pid=69328) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 835.486525] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-03e49251-e0b7-4d72-8304-f7b9f7de256b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.488543] env[69328]: DEBUG oslo_vmware.rw_handles [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 835.488543] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]529c09e3-322e-be9e-5da5-74193f56e956" [ 835.488543] env[69328]: _type = "HttpNfcLease" [ 835.488543] env[69328]: }. 
{{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 835.490115] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02413689-8b6a-433b-a86e-7fc53e4d800a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.499708] env[69328]: DEBUG oslo_vmware.rw_handles [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528fd672-9993-43e8-22c4-5536b5a4af45/disk-0.vmdk from lease info. {{(pid=69328) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 835.499965] env[69328]: DEBUG oslo_vmware.rw_handles [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528fd672-9993-43e8-22c4-5536b5a4af45/disk-0.vmdk for reading. {{(pid=69328) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 835.503092] env[69328]: DEBUG oslo_vmware.api [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Waiting for the task: (returnval){ [ 835.503092] env[69328]: value = "task-3273358" [ 835.503092] env[69328]: _type = "Task" [ 835.503092] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.511663] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d43f8c96-3d11-438c-86df-2d641f9b6a9e tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Creating Snapshot of the VM instance {{(pid=69328) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 835.566326] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-5269a491-d5f8-42b5-9680-904ec90b8b54 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.574018] env[69328]: DEBUG oslo_vmware.api [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': task-3273358, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071651} completed successfully. 
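The snapshot-upload path visible here is: linked clone, ExportVm to obtain an HttpNfcLease, wait for the lease to become ready, then pull the disk URL out of the lease info and stream the VMDK for upload. A sketch of just the "find the VMDK URL" step; lease_info is a plain dict standing in for the SOAP lease-info object, and the "*"-to-host substitution is an assumption based on the URL shape shown in the log:

    def find_vmdk_url(lease_info, host):
        """Return the first disk URL from lease info, pinned to the given ESX host."""
        for device_url in lease_info["deviceUrl"]:
            if device_url.get("disk"):                 # skip non-disk devices such as NVRAM
                # the lease hands back a template of the form https://*/nfc/.../disk-0.vmdk
                return device_url["url"].replace("*", host)
        raise LookupError("lease exposes no disk device")

    lease_info = {"deviceUrl": [{"disk": True,
                                 "url": "https://*/nfc/example-ticket/disk-0.vmdk"}]}
    print(find_vmdk_url(lease_info, "esx7c1n2.openstack.eu-de-1.cloud.sap"))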
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.574580] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 835.576380] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-423debb8-7784-4612-a658-7edc9264bcd0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.578917] env[69328]: DEBUG oslo_vmware.api [None req-d43f8c96-3d11-438c-86df-2d641f9b6a9e tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for the task: (returnval){ [ 835.578917] env[69328]: value = "task-3273359" [ 835.578917] env[69328]: _type = "Task" [ 835.578917] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.580317] env[69328]: DEBUG nova.policy [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '69ca01fd1d0f42b0b05a5426da9753ad', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '30209bc93a4042488f15c73b7e4733d5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 835.600638] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Reconfiguring VM instance instance-0000003a to attach disk [datastore2] 4a990411-16cd-4e53-9068-29654b69abe6/4a990411-16cd-4e53-9068-29654b69abe6.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 835.601766] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-20f7a9d7-b956-4019-b9a2-73fb3e0e75bb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.623341] env[69328]: DEBUG oslo_vmware.api [None req-d43f8c96-3d11-438c-86df-2d641f9b6a9e tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273359, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.624189] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e0a22e65-f74a-4d4c-ae20-62bc40189d5e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.629859] env[69328]: DEBUG oslo_vmware.api [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Waiting for the task: (returnval){ [ 835.629859] env[69328]: value = "task-3273360" [ 835.629859] env[69328]: _type = "Task" [ 835.629859] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.640522] env[69328]: DEBUG oslo_vmware.api [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': task-3273360, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.795109] env[69328]: DEBUG oslo_vmware.api [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a51524-a6d3-fe82-79d1-3affd8049b46, 'name': SearchDatastore_Task, 'duration_secs': 0.043181} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.796508] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65580d4b-822e-485e-abbe-7973571decca {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.803644] env[69328]: DEBUG oslo_vmware.api [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Waiting for the task: (returnval){ [ 835.803644] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52619f22-5bd8-233f-6fd4-c81ea6cb13e0" [ 835.803644] env[69328]: _type = "Task" [ 835.803644] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.805321] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-970b7cc7-0a5b-4e6c-8853-38e9d5f3f5be tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 835.805563] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-970b7cc7-0a5b-4e6c-8853-38e9d5f3f5be tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 835.805736] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-970b7cc7-0a5b-4e6c-8853-38e9d5f3f5be tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Deleting the datastore file [datastore2] 15a8de08-4d20-4329-9867-53e5dff82878 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 835.810718] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-44dc2584-25c6-473e-9c14-303dfedbbcdb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.818022] env[69328]: DEBUG oslo_vmware.api [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52619f22-5bd8-233f-6fd4-c81ea6cb13e0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.819468] env[69328]: DEBUG oslo_vmware.api [None req-970b7cc7-0a5b-4e6c-8853-38e9d5f3f5be tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for the task: (returnval){ [ 835.819468] env[69328]: value = "task-3273361" [ 835.819468] env[69328]: _type = "Task" [ 835.819468] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.828914] env[69328]: DEBUG oslo_vmware.api [None req-970b7cc7-0a5b-4e6c-8853-38e9d5f3f5be tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3273361, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.932086] env[69328]: INFO nova.compute.manager [-] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Took 1.04 seconds to deallocate network for instance. [ 835.971638] env[69328]: DEBUG nova.compute.manager [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Start building block device mappings for instance. 
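The rescue-test teardown above runs a fixed sequence: power off, unregister the VM, delete its datastore directory, then hand back to the compute manager to deallocate the network. A compressed sketch of that ordering with placeholder session methods (power_off, unregister and delete_datastore_dir are not the driver's real API), loosely mirroring its log-and-continue error handling:

    def destroy_instance(session, instance, ds_path):
        """Teardown in the order the log shows: stop, unregister, wipe datastore files."""
        steps = [
            ("power off", lambda: session.power_off(instance)),
            ("unregister", lambda: session.unregister(instance)),
            ("delete datastore dir", lambda: session.delete_datastore_dir(ds_path)),
        ]
        for name, step in steps:
            try:
                step()
            except Exception as exc:      # keep going so cleanup stays best-effort
                print(f"{name} failed for {instance}: {exc}")
        # network deallocation follows in the compute manager, outside the driver

    class FakeSession:                    # stand-in so the sketch runs end to end
        def power_off(self, i): print("PowerOffVM_Task", i)
        def unregister(self, i): print("UnregisterVM", i)
        def delete_datastore_dir(self, p): print("DeleteDatastoreFile_Task", p)

    destroy_instance(FakeSession(), "15a8de08-4d20-4329-9867-53e5dff82878",
                     "[datastore2] 15a8de08-4d20-4329-9867-53e5dff82878")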
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 835.977434] env[69328]: DEBUG nova.network.neutron [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Successfully created port: 766304d2-5559-4007-9fa4-a01027d56e49 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 836.096232] env[69328]: DEBUG oslo_vmware.api [None req-d43f8c96-3d11-438c-86df-2d641f9b6a9e tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273359, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.147768] env[69328]: DEBUG oslo_vmware.api [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': task-3273360, 'name': ReconfigVM_Task, 'duration_secs': 0.487301} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.148457] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Reconfigured VM instance instance-0000003a to attach disk [datastore2] 4a990411-16cd-4e53-9068-29654b69abe6/4a990411-16cd-4e53-9068-29654b69abe6.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 836.149307] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b278b2aa-ea26-48d6-a261-0cf4a603737d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.157777] env[69328]: DEBUG oslo_vmware.api [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Waiting for the task: (returnval){ [ 836.157777] env[69328]: value = "task-3273362" [ 836.157777] env[69328]: _type = "Task" [ 836.157777] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.176513] env[69328]: DEBUG oslo_vmware.api [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': task-3273362, 'name': Rename_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.298717] env[69328]: DEBUG oslo_concurrency.lockutils [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquiring lock "refresh_cache-25fb207b-9388-4198-bb48-ab7cebd43375" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.298717] env[69328]: DEBUG oslo_concurrency.lockutils [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquired lock "refresh_cache-25fb207b-9388-4198-bb48-ab7cebd43375" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 836.298921] env[69328]: DEBUG nova.network.neutron [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 836.317469] env[69328]: DEBUG oslo_vmware.api [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52619f22-5bd8-233f-6fd4-c81ea6cb13e0, 'name': SearchDatastore_Task, 'duration_secs': 0.025494} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.321306] env[69328]: DEBUG oslo_concurrency.lockutils [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 836.321703] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] d045c9ca-71f9-411e-9048-71b36c32f4b2/d045c9ca-71f9-411e-9048-71b36c32f4b2.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 836.323436] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d144415a-a9ce-487d-ad88-4ee887b4f4c7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.340199] env[69328]: DEBUG oslo_vmware.api [None req-970b7cc7-0a5b-4e6c-8853-38e9d5f3f5be tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3273361, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.344808] env[69328]: DEBUG oslo_vmware.api [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Waiting for the task: (returnval){ [ 836.344808] env[69328]: value = "task-3273363" [ 836.344808] env[69328]: _type = "Task" [ 836.344808] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.356844] env[69328]: DEBUG oslo_vmware.api [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': task-3273363, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.442776] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 836.515226] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ea6cace-c05f-42e8-9d4e-23cc6d2706ef {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.524327] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0c16c2e-2327-499f-ae81-dfd2c47ca5d4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.563380] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9523dc9-cd99-47ea-9c7a-3b471fbc51de {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.573400] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81efca78-b24f-49fc-86a1-2620aa47a532 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.588860] env[69328]: DEBUG nova.compute.provider_tree [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 836.599692] env[69328]: DEBUG oslo_vmware.api [None req-d43f8c96-3d11-438c-86df-2d641f9b6a9e tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273359, 'name': CreateSnapshot_Task, 'duration_secs': 0.980154} completed successfully. 
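"Inventory has not changed in ProviderTree for provider ..." means the freshly computed inventory matched what the provider tree already holds, so no update needs to be pushed to Placement this period. The decision reduces to comparing the cached and recomputed inventory dicts; a minimal illustration using the resource classes shown a few entries below:

    def inventory_changed(cached, new):
        """True only if some resource class or field differs from the cached copy."""
        return cached != new

    cached = {"VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
                       "step_size": 1, "allocation_ratio": 4.0}}
    fresh = {"VCPU": dict(cached["VCPU"])}           # identical data recomputed this period
    print(inventory_changed(cached, fresh))          # False -> nothing is sent to Placement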
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.599970] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d43f8c96-3d11-438c-86df-2d641f9b6a9e tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Created Snapshot of the VM instance {{(pid=69328) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 836.601942] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b0e4b4c-d2a3-4aa3-b0d3-d9c45e03476d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.669808] env[69328]: DEBUG oslo_vmware.api [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': task-3273362, 'name': Rename_Task, 'duration_secs': 0.2475} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.670130] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 836.670457] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-75c3afdd-f153-499b-a9f1-6346767bbb3f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.679124] env[69328]: DEBUG oslo_vmware.api [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Waiting for the task: (returnval){ [ 836.679124] env[69328]: value = "task-3273364" [ 836.679124] env[69328]: _type = "Task" [ 836.679124] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.688946] env[69328]: DEBUG oslo_vmware.api [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': task-3273364, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.838563] env[69328]: DEBUG oslo_vmware.api [None req-970b7cc7-0a5b-4e6c-8853-38e9d5f3f5be tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Task: {'id': task-3273361, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.547984} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.838634] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-970b7cc7-0a5b-4e6c-8853-38e9d5f3f5be tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 836.838820] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-970b7cc7-0a5b-4e6c-8853-38e9d5f3f5be tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 836.839127] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-970b7cc7-0a5b-4e6c-8853-38e9d5f3f5be tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 836.839418] env[69328]: INFO nova.compute.manager [None req-970b7cc7-0a5b-4e6c-8853-38e9d5f3f5be tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Took 1.92 seconds to destroy the instance on the hypervisor. [ 836.839745] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-970b7cc7-0a5b-4e6c-8853-38e9d5f3f5be tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 836.840020] env[69328]: DEBUG nova.compute.manager [-] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 836.840205] env[69328]: DEBUG nova.network.neutron [-] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 836.855964] env[69328]: DEBUG oslo_vmware.api [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': task-3273363, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.986146] env[69328]: DEBUG nova.compute.manager [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Start spawning the instance on the hypervisor. 
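"Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return" shows network teardown being driven through a retrying looping call rather than a single attempt. A stdlib stand-in for that retry wrapper (the real mechanism is oslo.service's loopingcall; call_with_retries and its arguments are illustrative):

    import time

    def call_with_retries(func, attempts=3, delay=1.0):
        """Run func until it succeeds or the attempts are exhausted."""
        for attempt in range(1, attempts + 1):
            try:
                return func()
            except Exception as exc:
                if attempt == attempts:
                    raise
                print(f"attempt {attempt} failed ({exc}); retrying in {delay}s")
                time.sleep(delay)

    # usage: call_with_retries(lambda: deallocate_network(context, instance)),
    # where deallocate_network is whatever callable actually tears the ports down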
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 837.024179] env[69328]: DEBUG nova.virt.hardware [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=<?>,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-04-03T17:33:40Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 837.024577] env[69328]: DEBUG nova.virt.hardware [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 837.024817] env[69328]: DEBUG nova.virt.hardware [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 837.025069] env[69328]: DEBUG nova.virt.hardware [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 837.025275] env[69328]: DEBUG nova.virt.hardware [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 837.025577] env[69328]: DEBUG nova.virt.hardware [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 837.025896] env[69328]: DEBUG nova.virt.hardware [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 837.026194] env[69328]: DEBUG nova.virt.hardware [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 837.026455] 
env[69328]: DEBUG nova.virt.hardware [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 837.026711] env[69328]: DEBUG nova.virt.hardware [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 837.026971] env[69328]: DEBUG nova.virt.hardware [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 837.028098] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f736c8c8-8f89-4dc9-9fe7-2634278bd113 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.039351] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d26459d-95b0-42f6-a55d-84e179e40769 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.096228] env[69328]: DEBUG nova.scheduler.client.report [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 837.123317] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d43f8c96-3d11-438c-86df-2d641f9b6a9e tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Creating linked-clone VM from snapshot {{(pid=69328) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 837.124337] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-9c900fdf-16e1-49b8-a830-10d16112fbe2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.136874] env[69328]: DEBUG oslo_vmware.api [None req-d43f8c96-3d11-438c-86df-2d641f9b6a9e tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for the task: (returnval){ [ 837.136874] env[69328]: value = "task-3273365" [ 837.136874] env[69328]: _type = "Task" [ 837.136874] env[69328]: } to complete. 
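The hardware lines above walk from flavor and image limits down to "Build topologies for 1 vcpu(s) 1:1:1" and a single possible VirtCPUTopology(cores=1,sockets=1,threads=1). The core idea is enumerating every sockets*cores*threads factorisation of the vCPU count that stays within the limits; a small sketch of that enumeration (simplified relative to nova's actual preference ordering):

    def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
        """Yield (sockets, cores, threads) triples whose product equals vcpus."""
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        yield (sockets, cores, threads)

    print(list(possible_topologies(1, 65536, 65536, 65536)))   # [(1, 1, 1)], as in the log
    print(list(possible_topologies(4, 2, 2, 2)))               # (1, 2, 2), (2, 1, 2), (2, 2, 1)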
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.149067] env[69328]: DEBUG oslo_vmware.api [None req-d43f8c96-3d11-438c-86df-2d641f9b6a9e tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273365, 'name': CloneVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.191699] env[69328]: DEBUG oslo_vmware.api [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': task-3273364, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.356180] env[69328]: DEBUG oslo_vmware.api [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': task-3273363, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.690767} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.358782] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] d045c9ca-71f9-411e-9048-71b36c32f4b2/d045c9ca-71f9-411e-9048-71b36c32f4b2.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 837.359011] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 837.359498] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4dbbfd01-353b-4390-821b-edab358211ba {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.367506] env[69328]: DEBUG oslo_vmware.api [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Waiting for the task: (returnval){ [ 837.367506] env[69328]: value = "task-3273366" [ 837.367506] env[69328]: _type = "Task" [ 837.367506] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.377216] env[69328]: DEBUG oslo_vmware.api [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': task-3273366, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.512199] env[69328]: DEBUG nova.network.neutron [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Updating instance_info_cache with network_info: [{"id": "32db9785-1822-4acf-9971-06db92f35c18", "address": "fa:16:3e:3e:aa:b8", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.119", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32db9785-18", "ovs_interfaceid": "32db9785-1822-4acf-9971-06db92f35c18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.607044] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.651s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 837.607044] env[69328]: DEBUG nova.compute.manager [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Start building networks asynchronously for instance. 
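The instance_info_cache update above stores network_info as a list of VIF dicts with nested network/subnet/ip structures. Pulling the fixed IPs back out is plain nested iteration; a sketch against the shape shown in the log, assuming only the keys used below:

    def fixed_ips(network_info):
        """Map each VIF id to its list of fixed IP addresses."""
        result = {}
        for vif in network_info:
            result[vif["id"]] = [ip["address"]
                                 for subnet in vif["network"]["subnets"]
                                 for ip in subnet["ips"]]
        return result

    nw_info = [{"id": "32db9785-1822-4acf-9971-06db92f35c18",
                "network": {"subnets": [{"ips": [{"address": "192.168.233.119"}]}]}}]
    print(fixed_ips(nw_info))   # {'32db9785-...': ['192.168.233.119']}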
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 837.612124] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3d59153c-ee16-4079-8e43-7b01f83b8074 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.358s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 837.612124] env[69328]: DEBUG nova.objects.instance [None req-3d59153c-ee16-4079-8e43-7b01f83b8074 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Lazy-loading 'resources' on Instance uuid 690096cf-a0bd-4db1-ad97-8d8a37ad7c84 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 837.638625] env[69328]: DEBUG nova.compute.manager [req-9cf9c828-09dc-4b9c-a3e1-cd7ce3df4902 req-981292ff-cdd9-4f25-968f-3fe59943cba5 service nova] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Received event network-vif-deleted-f659c974-1a37-4e6b-bbff-d8a0858a6756 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 837.639386] env[69328]: INFO nova.compute.manager [req-9cf9c828-09dc-4b9c-a3e1-cd7ce3df4902 req-981292ff-cdd9-4f25-968f-3fe59943cba5 service nova] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Neutron deleted interface f659c974-1a37-4e6b-bbff-d8a0858a6756; detaching it from the instance and deleting it from the info cache [ 837.639616] env[69328]: DEBUG nova.network.neutron [req-9cf9c828-09dc-4b9c-a3e1-cd7ce3df4902 req-981292ff-cdd9-4f25-968f-3fe59943cba5 service nova] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.655646] env[69328]: DEBUG oslo_vmware.api [None req-d43f8c96-3d11-438c-86df-2d641f9b6a9e tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273365, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.670934] env[69328]: DEBUG nova.compute.manager [req-96b121ca-a016-4cc6-9d1c-8c006bb86ee5 req-692fc8b6-dbe0-49b6-baa2-7ba970b40fa6 service nova] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Received event network-vif-plugged-766304d2-5559-4007-9fa4-a01027d56e49 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 837.671069] env[69328]: DEBUG oslo_concurrency.lockutils [req-96b121ca-a016-4cc6-9d1c-8c006bb86ee5 req-692fc8b6-dbe0-49b6-baa2-7ba970b40fa6 service nova] Acquiring lock "36f6aab5-2774-402b-9db6-9912f2d5d473-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 837.671320] env[69328]: DEBUG oslo_concurrency.lockutils [req-96b121ca-a016-4cc6-9d1c-8c006bb86ee5 req-692fc8b6-dbe0-49b6-baa2-7ba970b40fa6 service nova] Lock "36f6aab5-2774-402b-9db6-9912f2d5d473-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 837.671596] env[69328]: DEBUG oslo_concurrency.lockutils [req-96b121ca-a016-4cc6-9d1c-8c006bb86ee5 req-692fc8b6-dbe0-49b6-baa2-7ba970b40fa6 service nova] Lock "36f6aab5-2774-402b-9db6-9912f2d5d473-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 837.671932] env[69328]: DEBUG nova.compute.manager [req-96b121ca-a016-4cc6-9d1c-8c006bb86ee5 req-692fc8b6-dbe0-49b6-baa2-7ba970b40fa6 service nova] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] No waiting events found dispatching network-vif-plugged-766304d2-5559-4007-9fa4-a01027d56e49 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 837.672120] env[69328]: WARNING nova.compute.manager [req-96b121ca-a016-4cc6-9d1c-8c006bb86ee5 req-692fc8b6-dbe0-49b6-baa2-7ba970b40fa6 service nova] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Received unexpected event network-vif-plugged-766304d2-5559-4007-9fa4-a01027d56e49 for instance with vm_state building and task_state spawning. [ 837.690739] env[69328]: DEBUG oslo_vmware.api [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': task-3273364, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.774746] env[69328]: DEBUG nova.network.neutron [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Successfully updated port: 766304d2-5559-4007-9fa4-a01027d56e49 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 837.879442] env[69328]: DEBUG oslo_vmware.api [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': task-3273366, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073093} completed successfully. 
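The "Received event network-vif-plugged-... / No waiting events found ... Received unexpected event" sequence above is the external-event rendezvous: the build thread registers the events it intends to wait for, and the Neutron-triggered handler pops and signals them; an event nothing registered for is logged as unexpected, which is harmless while the instance is still spawning. A threading.Event sketch of that handshake (a simplification, not the compute manager's API):

    import threading

    class InstanceEvents:
        """Register-then-signal handshake between a build thread and an event handler."""
        def __init__(self):
            self._waiters = {}                 # (instance, event-name) -> threading.Event
            self._lock = threading.Lock()

        def prepare(self, instance, name):
            ev = threading.Event()
            with self._lock:
                self._waiters[(instance, name)] = ev
            return ev                          # the builder later calls ev.wait(timeout)

        def pop(self, instance, name):
            with self._lock:
                ev = self._waiters.pop((instance, name), None)
            if ev is None:
                print(f"Received unexpected event {name} for instance {instance}")
            else:
                ev.set()                       # wakes the thread blocked in ev.wait()

    events = InstanceEvents()
    events.pop("36f6aab5", "network-vif-plugged")   # nothing registered -> "unexpected"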
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.879442] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 837.879442] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bcc728e-1fac-49ad-8dc6-6b855130d9f3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.908106] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Reconfiguring VM instance instance-00000039 to attach disk [datastore1] d045c9ca-71f9-411e-9048-71b36c32f4b2/d045c9ca-71f9-411e-9048-71b36c32f4b2.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 837.908106] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b6a76692-42c0-48e1-becd-2cd19f164357 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.929542] env[69328]: DEBUG oslo_vmware.api [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Waiting for the task: (returnval){ [ 837.929542] env[69328]: value = "task-3273367" [ 837.929542] env[69328]: _type = "Task" [ 837.929542] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.938359] env[69328]: DEBUG oslo_vmware.api [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': task-3273367, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.015228] env[69328]: DEBUG oslo_concurrency.lockutils [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Releasing lock "refresh_cache-25fb207b-9388-4198-bb48-ab7cebd43375" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 838.124466] env[69328]: DEBUG nova.network.neutron [-] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.129346] env[69328]: DEBUG nova.compute.utils [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 838.129927] env[69328]: DEBUG nova.compute.manager [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 838.130163] env[69328]: DEBUG nova.network.neutron [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 838.152532] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5ab7512b-8311-4f4a-b725-ba56d9cfcbba {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.167812] env[69328]: DEBUG oslo_vmware.api [None req-d43f8c96-3d11-438c-86df-2d641f9b6a9e tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273365, 'name': CloneVM_Task} progress is 94%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.178921] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6caa1614-0c52-4203-be5b-9a6073b9f1b2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.197220] env[69328]: DEBUG nova.policy [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '43be625728f24af5a2f6a650279d689d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cdc479a290524130b9d17e627a64b65a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 838.216981] env[69328]: DEBUG oslo_vmware.api [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': task-3273364, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.229772] env[69328]: DEBUG nova.compute.manager [req-9cf9c828-09dc-4b9c-a3e1-cd7ce3df4902 req-981292ff-cdd9-4f25-968f-3fe59943cba5 service nova] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Detach interface failed, port_id=f659c974-1a37-4e6b-bbff-d8a0858a6756, reason: Instance 15a8de08-4d20-4329-9867-53e5dff82878 could not be found. 
{{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 838.278145] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "refresh_cache-36f6aab5-2774-402b-9db6-9912f2d5d473" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.278300] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquired lock "refresh_cache-36f6aab5-2774-402b-9db6-9912f2d5d473" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 838.282023] env[69328]: DEBUG nova.network.neutron [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 838.443465] env[69328]: DEBUG oslo_vmware.api [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': task-3273367, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.546217] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f2ba8d6-fd38-4ad6-83a3-54d7cb96b387 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.570206] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9db3c462-d11c-4da6-9479-9fc50b007945 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.578530] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Updating instance '25fb207b-9388-4198-bb48-ab7cebd43375' progress to 83 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 838.634020] env[69328]: INFO nova.compute.manager [-] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Took 1.79 seconds to deallocate network for instance. [ 838.634020] env[69328]: DEBUG nova.compute.manager [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 838.657672] env[69328]: DEBUG oslo_vmware.api [None req-d43f8c96-3d11-438c-86df-2d641f9b6a9e tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273365, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.705498] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76234287-b0f9-4d49-a0aa-3dd2d527b9a2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.716401] env[69328]: DEBUG oslo_vmware.api [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': task-3273364, 'name': PowerOnVM_Task, 'duration_secs': 1.756871} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.717411] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34f72fab-c265-4b98-92d2-bd6de44a9d6f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.720574] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 838.720819] env[69328]: INFO nova.compute.manager [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Took 6.04 seconds to spawn the instance on the hypervisor. [ 838.721017] env[69328]: DEBUG nova.compute.manager [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 838.721799] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5aeaecb-f32b-4256-8ff0-494480b41036 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.757772] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f77e8d07-a64f-4d5c-a98f-a7d92294df73 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.767682] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13d089dc-f30d-4c36-8811-86c628de4edf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.782952] env[69328]: DEBUG nova.compute.provider_tree [None req-3d59153c-ee16-4079-8e43-7b01f83b8074 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 838.826707] env[69328]: DEBUG nova.network.neutron [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Successfully created port: 
79aa6a07-f43a-499b-9989-2017b35d1615 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 838.830904] env[69328]: DEBUG nova.network.neutron [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 838.942951] env[69328]: DEBUG oslo_vmware.api [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': task-3273367, 'name': ReconfigVM_Task, 'duration_secs': 0.549175} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.942951] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Reconfigured VM instance instance-00000039 to attach disk [datastore1] d045c9ca-71f9-411e-9048-71b36c32f4b2/d045c9ca-71f9-411e-9048-71b36c32f4b2.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 838.943480] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-44e7f8cb-6446-4a62-847b-773c1d43dfe1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.950328] env[69328]: DEBUG oslo_vmware.api [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Waiting for the task: (returnval){ [ 838.950328] env[69328]: value = "task-3273368" [ 838.950328] env[69328]: _type = "Task" [ 838.950328] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.960270] env[69328]: DEBUG oslo_vmware.api [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': task-3273368, 'name': Rename_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.030607] env[69328]: DEBUG nova.network.neutron [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Updating instance_info_cache with network_info: [{"id": "766304d2-5559-4007-9fa4-a01027d56e49", "address": "fa:16:3e:7b:c7:c8", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap766304d2-55", "ovs_interfaceid": "766304d2-5559-4007-9fa4-a01027d56e49", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.084792] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 839.085155] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bfb98bf6-15fd-4df0-96f3-1a121fd218bd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.095800] env[69328]: DEBUG oslo_vmware.api [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Waiting for the task: (returnval){ [ 839.095800] env[69328]: value = "task-3273369" [ 839.095800] env[69328]: _type = "Task" [ 839.095800] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.104792] env[69328]: DEBUG oslo_vmware.api [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273369, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.147668] env[69328]: DEBUG oslo_concurrency.lockutils [None req-970b7cc7-0a5b-4e6c-8853-38e9d5f3f5be tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 839.158573] env[69328]: DEBUG oslo_vmware.api [None req-d43f8c96-3d11-438c-86df-2d641f9b6a9e tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273365, 'name': CloneVM_Task, 'duration_secs': 1.876766} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.158850] env[69328]: INFO nova.virt.vmwareapi.vmops [None req-d43f8c96-3d11-438c-86df-2d641f9b6a9e tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Created linked-clone VM from snapshot [ 839.159696] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22f908b7-508a-4e7d-bca4-32ec8abb6f19 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.167825] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-d43f8c96-3d11-438c-86df-2d641f9b6a9e tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Uploading image 38cbc678-ce07-4326-af85-f08c2919b511 {{(pid=69328) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 839.183356] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d43f8c96-3d11-438c-86df-2d641f9b6a9e tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Destroying the VM {{(pid=69328) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 839.183662] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-8635317c-7573-487e-bd6d-d1510a5ca02c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.192121] env[69328]: DEBUG oslo_vmware.api [None req-d43f8c96-3d11-438c-86df-2d641f9b6a9e tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for the task: (returnval){ [ 839.192121] env[69328]: value = "task-3273370" [ 839.192121] env[69328]: _type = "Task" [ 839.192121] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.204024] env[69328]: DEBUG oslo_vmware.api [None req-d43f8c96-3d11-438c-86df-2d641f9b6a9e tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273370, 'name': Destroy_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.266222] env[69328]: INFO nova.compute.manager [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Took 45.05 seconds to build instance. [ 839.289898] env[69328]: DEBUG nova.scheduler.client.report [None req-3d59153c-ee16-4079-8e43-7b01f83b8074 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 839.461983] env[69328]: DEBUG oslo_vmware.api [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': task-3273368, 'name': Rename_Task, 'duration_secs': 0.303108} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.461983] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 839.462927] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-515eadfc-8c7e-472f-b353-9627bf66f987 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.471778] env[69328]: DEBUG oslo_vmware.api [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Waiting for the task: (returnval){ [ 839.471778] env[69328]: value = "task-3273371" [ 839.471778] env[69328]: _type = "Task" [ 839.471778] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.484958] env[69328]: DEBUG oslo_vmware.api [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': task-3273371, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.533826] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Releasing lock "refresh_cache-36f6aab5-2774-402b-9db6-9912f2d5d473" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 839.534313] env[69328]: DEBUG nova.compute.manager [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Instance network_info: |[{"id": "766304d2-5559-4007-9fa4-a01027d56e49", "address": "fa:16:3e:7b:c7:c8", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap766304d2-55", "ovs_interfaceid": "766304d2-5559-4007-9fa4-a01027d56e49", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 839.534780] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7b:c7:c8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'de7fa486-5f28-44ae-b0cf-72234ff87546', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '766304d2-5559-4007-9fa4-a01027d56e49', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 839.547193] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Creating folder: Project (30209bc93a4042488f15c73b7e4733d5). Parent ref: group-v653649. 
{{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 839.547874] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ca8f4e29-0981-4ebe-8923-591f8df7c80b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.561680] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Created folder: Project (30209bc93a4042488f15c73b7e4733d5) in parent group-v653649. [ 839.561918] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Creating folder: Instances. Parent ref: group-v653821. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 839.562219] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e2836648-624b-4d42-a9e3-6c1871acc212 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.576559] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Created folder: Instances in parent group-v653821. [ 839.577611] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 839.578115] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 839.578469] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f78788f4-9dd5-4759-8b73-0ac5ba7588f4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.619258] env[69328]: DEBUG oslo_vmware.api [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273369, 'name': PowerOnVM_Task, 'duration_secs': 0.430205} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.621558] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 839.621892] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-76097e5b-444d-477e-9bc2-7125a6ea9d9e tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Updating instance '25fb207b-9388-4198-bb48-ab7cebd43375' progress to 100 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 839.627506] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 839.627506] env[69328]: value = "task-3273374" [ 839.627506] env[69328]: _type = "Task" [ 839.627506] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.650408] env[69328]: DEBUG nova.compute.manager [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 839.678041] env[69328]: DEBUG nova.virt.hardware [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 839.678152] env[69328]: DEBUG nova.virt.hardware [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 839.678448] env[69328]: DEBUG nova.virt.hardware [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 839.678526] env[69328]: DEBUG nova.virt.hardware [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
839.678618] env[69328]: DEBUG nova.virt.hardware [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 839.678775] env[69328]: DEBUG nova.virt.hardware [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 839.678994] env[69328]: DEBUG nova.virt.hardware [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 839.679181] env[69328]: DEBUG nova.virt.hardware [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 839.679388] env[69328]: DEBUG nova.virt.hardware [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 839.679560] env[69328]: DEBUG nova.virt.hardware [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 839.679731] env[69328]: DEBUG nova.virt.hardware [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 839.681081] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa59f34a-7064-4e43-864e-dcfd43137f93 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.691810] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3760da4a-2a7e-42c1-9718-d34779236142 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.708387] env[69328]: DEBUG oslo_vmware.api [None req-d43f8c96-3d11-438c-86df-2d641f9b6a9e tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273370, 'name': Destroy_Task} progress is 33%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.767494] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0af0b458-b90c-4cec-a3ef-99a6b1f79b12 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Lock "4a990411-16cd-4e53-9068-29654b69abe6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.287s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 839.795939] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3d59153c-ee16-4079-8e43-7b01f83b8074 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.187s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 839.798352] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9cbdacf4-f698-4c2d-b07c-3d109d551393 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.590s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 839.798608] env[69328]: DEBUG nova.objects.instance [None req-9cbdacf4-f698-4c2d-b07c-3d109d551393 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Lazy-loading 'resources' on Instance uuid afa25f89-ccda-4b77-aaa1-a3b62b53d870 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 839.822461] env[69328]: INFO nova.scheduler.client.report [None req-3d59153c-ee16-4079-8e43-7b01f83b8074 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Deleted allocations for instance 690096cf-a0bd-4db1-ad97-8d8a37ad7c84 [ 839.849208] env[69328]: DEBUG nova.compute.manager [req-755d95fc-1dcb-4e28-9b32-cd992d970cdf req-04f15950-6330-4524-ab0c-eb37aa1d82bd service nova] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Received event network-changed-766304d2-5559-4007-9fa4-a01027d56e49 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 839.849486] env[69328]: DEBUG nova.compute.manager [req-755d95fc-1dcb-4e28-9b32-cd992d970cdf req-04f15950-6330-4524-ab0c-eb37aa1d82bd service nova] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Refreshing instance network info cache due to event network-changed-766304d2-5559-4007-9fa4-a01027d56e49. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 839.849805] env[69328]: DEBUG oslo_concurrency.lockutils [req-755d95fc-1dcb-4e28-9b32-cd992d970cdf req-04f15950-6330-4524-ab0c-eb37aa1d82bd service nova] Acquiring lock "refresh_cache-36f6aab5-2774-402b-9db6-9912f2d5d473" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.849939] env[69328]: DEBUG oslo_concurrency.lockutils [req-755d95fc-1dcb-4e28-9b32-cd992d970cdf req-04f15950-6330-4524-ab0c-eb37aa1d82bd service nova] Acquired lock "refresh_cache-36f6aab5-2774-402b-9db6-9912f2d5d473" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 839.849986] env[69328]: DEBUG nova.network.neutron [req-755d95fc-1dcb-4e28-9b32-cd992d970cdf req-04f15950-6330-4524-ab0c-eb37aa1d82bd service nova] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Refreshing network info cache for port 766304d2-5559-4007-9fa4-a01027d56e49 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 839.983696] env[69328]: DEBUG oslo_vmware.api [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': task-3273371, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.145122] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273374, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.210621] env[69328]: DEBUG oslo_vmware.api [None req-d43f8c96-3d11-438c-86df-2d641f9b6a9e tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273370, 'name': Destroy_Task, 'duration_secs': 0.575777} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.210989] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-d43f8c96-3d11-438c-86df-2d641f9b6a9e tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Destroyed the VM [ 840.211390] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d43f8c96-3d11-438c-86df-2d641f9b6a9e tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Deleting Snapshot of the VM instance {{(pid=69328) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 840.211731] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-8346ef90-8db8-498e-84c0-87acda41722e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.220912] env[69328]: DEBUG oslo_vmware.api [None req-d43f8c96-3d11-438c-86df-2d641f9b6a9e tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for the task: (returnval){ [ 840.220912] env[69328]: value = "task-3273375" [ 840.220912] env[69328]: _type = "Task" [ 840.220912] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.232490] env[69328]: DEBUG oslo_vmware.api [None req-d43f8c96-3d11-438c-86df-2d641f9b6a9e tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273375, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.336459] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3d59153c-ee16-4079-8e43-7b01f83b8074 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Lock "690096cf-a0bd-4db1-ad97-8d8a37ad7c84" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.711s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 840.493683] env[69328]: DEBUG oslo_vmware.api [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': task-3273371, 'name': PowerOnVM_Task, 'duration_secs': 1.006008} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.494091] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 840.494330] env[69328]: INFO nova.compute.manager [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Took 14.98 seconds to spawn the instance on the hypervisor. [ 840.494555] env[69328]: DEBUG nova.compute.manager [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 840.495947] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a78071ed-a280-4be7-9b49-f24dff202304 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.645598] env[69328]: DEBUG nova.network.neutron [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Successfully updated port: 79aa6a07-f43a-499b-9989-2017b35d1615 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 840.657964] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273374, 'name': CreateVM_Task, 'duration_secs': 0.56641} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.657964] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 840.658187] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.658360] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 840.658710] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 840.659505] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6fff7c05-99e8-4102-a62a-b0a301affab9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.668033] env[69328]: DEBUG oslo_vmware.api [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 840.668033] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ea5c9a-f9ec-8be3-1236-db3041aafeb2" [ 840.668033] env[69328]: _type = "Task" [ 840.668033] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.678884] env[69328]: DEBUG oslo_vmware.api [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ea5c9a-f9ec-8be3-1236-db3041aafeb2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.728330] env[69328]: DEBUG nova.network.neutron [req-755d95fc-1dcb-4e28-9b32-cd992d970cdf req-04f15950-6330-4524-ab0c-eb37aa1d82bd service nova] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Updated VIF entry in instance network info cache for port 766304d2-5559-4007-9fa4-a01027d56e49. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 840.729013] env[69328]: DEBUG nova.network.neutron [req-755d95fc-1dcb-4e28-9b32-cd992d970cdf req-04f15950-6330-4524-ab0c-eb37aa1d82bd service nova] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Updating instance_info_cache with network_info: [{"id": "766304d2-5559-4007-9fa4-a01027d56e49", "address": "fa:16:3e:7b:c7:c8", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap766304d2-55", "ovs_interfaceid": "766304d2-5559-4007-9fa4-a01027d56e49", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.733460] env[69328]: DEBUG oslo_vmware.api [None req-d43f8c96-3d11-438c-86df-2d641f9b6a9e tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273375, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.864207] env[69328]: INFO nova.compute.manager [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Rebuilding instance [ 840.921138] env[69328]: DEBUG nova.compute.manager [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 840.921138] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e75de08-c4c3-47ca-99a6-d392bf9237f2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.954302] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f621ee0-0d28-4894-86cb-57f28bffdc29 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.963469] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bc95cb6-6121-40bf-9436-44f79e340053 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.001197] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d462dc73-bd46-49d3-9b41-4d96aef9ff2a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.010795] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51216575-0f0b-4425-81c2-9dd2f3afde52 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.034026] env[69328]: DEBUG nova.compute.provider_tree [None req-9cbdacf4-f698-4c2d-b07c-3d109d551393 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 841.036552] env[69328]: INFO nova.compute.manager [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Took 50.95 seconds to build instance. 
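The recurring "Waiting for the task: (returnval){ value = "task-..." }", "Task: {'id': ...} progress is N%.", and "completed successfully" records in this log are produced by oslo.vmware's task polling (wait_for_task / _poll_task in oslo_vmware/api.py), which repeatedly queries vCenter until a long-running task such as PowerOnVM_Task or ReconfigVM_Task finishes. The snippet below is only a minimal, simplified sketch of that polling pattern for orientation: FakeTask, its poll() method, and the printed messages are illustrative stand-ins, not the real oslo.vmware API or Nova code.

# Minimal, simplified sketch of the polling pattern behind the
# "Task: {'id': ...} progress is N%." / "completed successfully." records above.
# FakeTask only simulates a vCenter task; the real polling lives in
# oslo_vmware.api (wait_for_task / _poll_task) and talks to vCenter over SOAP.
import time


class FakeTask:
    """Hypothetical stand-in for a vCenter task object (not the real API)."""

    def __init__(self, task_id: str):
        self.task_id = task_id
        self._progress = 0

    def poll(self) -> tuple[str, int]:
        """Return (state, progress); advances the fake task on each call."""
        self._progress = min(self._progress + 33, 100)
        state = "success" if self._progress >= 100 else "running"
        return state, self._progress


def wait_for_task(task: FakeTask, poll_interval: float = 0.5) -> None:
    """Poll until the task finishes, printing progress like the records above."""
    while True:
        state, progress = task.poll()
        if state == "running":
            print(f"Task: {{'id': {task.task_id!r}}} progress is {progress}%.")
            time.sleep(poll_interval)
        elif state == "success":
            print(f"Task: {{'id': {task.task_id!r}}} completed successfully.")
            return
        else:
            raise RuntimeError(f"Task {task.task_id} ended in state {state}")


if __name__ == "__main__":
    # Simulated run: prints a few progress records, then a completion record.
    wait_for_task(FakeTask("task-3273371"))
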
[ 841.157651] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "refresh_cache-6ccd0715-0903-4fed-bf80-240f386e4ad8" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.157846] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquired lock "refresh_cache-6ccd0715-0903-4fed-bf80-240f386e4ad8" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 841.158291] env[69328]: DEBUG nova.network.neutron [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 841.181572] env[69328]: DEBUG oslo_vmware.api [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ea5c9a-f9ec-8be3-1236-db3041aafeb2, 'name': SearchDatastore_Task, 'duration_secs': 0.014755} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.182275] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 841.182671] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 841.183041] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.183774] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 841.183774] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Creating directory with path 
[datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 841.184746] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8dd7aac0-d807-48a3-ab73-f4c67ca9d308 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.198731] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 841.199033] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 841.204961] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-452873d1-6e6c-4d0e-9b79-d0298a07dd0f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.217868] env[69328]: DEBUG oslo_vmware.api [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 841.217868] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ea4e5f-c7bf-3125-6db9-f98132ab39cf" [ 841.217868] env[69328]: _type = "Task" [ 841.217868] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.232793] env[69328]: DEBUG oslo_vmware.api [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ea4e5f-c7bf-3125-6db9-f98132ab39cf, 'name': SearchDatastore_Task, 'duration_secs': 0.012853} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.236850] env[69328]: DEBUG oslo_concurrency.lockutils [req-755d95fc-1dcb-4e28-9b32-cd992d970cdf req-04f15950-6330-4524-ab0c-eb37aa1d82bd service nova] Releasing lock "refresh_cache-36f6aab5-2774-402b-9db6-9912f2d5d473" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 841.237294] env[69328]: DEBUG oslo_vmware.api [None req-d43f8c96-3d11-438c-86df-2d641f9b6a9e tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273375, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.237529] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-173cac63-1854-42dc-856e-bfd04ae48cf4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.245268] env[69328]: DEBUG oslo_vmware.api [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 841.245268] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52dec057-6dfb-c5a4-38fa-bc793144d23f" [ 841.245268] env[69328]: _type = "Task" [ 841.245268] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.257197] env[69328]: DEBUG oslo_vmware.api [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52dec057-6dfb-c5a4-38fa-bc793144d23f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.537654] env[69328]: DEBUG nova.scheduler.client.report [None req-9cbdacf4-f698-4c2d-b07c-3d109d551393 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 841.546331] env[69328]: DEBUG oslo_concurrency.lockutils [None req-56c8a552-e7b1-4d06-850e-ef81818dfd50 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Lock "d045c9ca-71f9-411e-9048-71b36c32f4b2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 75.611s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 841.700173] env[69328]: DEBUG nova.network.neutron [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 841.744894] env[69328]: DEBUG oslo_vmware.api [None req-d43f8c96-3d11-438c-86df-2d641f9b6a9e tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273375, 'name': RemoveSnapshot_Task, 'duration_secs': 1.125613} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.744894] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d43f8c96-3d11-438c-86df-2d641f9b6a9e tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Deleted Snapshot of the VM instance {{(pid=69328) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 841.763509] env[69328]: DEBUG oslo_vmware.api [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52dec057-6dfb-c5a4-38fa-bc793144d23f, 'name': SearchDatastore_Task, 'duration_secs': 0.015203} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.763905] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 841.764236] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 36f6aab5-2774-402b-9db6-9912f2d5d473/36f6aab5-2774-402b-9db6-9912f2d5d473.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 841.765513] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f13e1d88-ec1c-42c9-a7f4-8b4de0210bc9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.777787] env[69328]: DEBUG oslo_vmware.api [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 841.777787] env[69328]: value = "task-3273376" [ 841.777787] env[69328]: _type = "Task" [ 841.777787] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.791705] env[69328]: DEBUG oslo_vmware.api [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273376, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.939797] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 841.940292] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-957a0122-1199-4e93-878d-6cd3c3e4c4bb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.950676] env[69328]: DEBUG oslo_vmware.api [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Waiting for the task: (returnval){ [ 841.950676] env[69328]: value = "task-3273377" [ 841.950676] env[69328]: _type = "Task" [ 841.950676] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.958611] env[69328]: DEBUG nova.network.neutron [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Updating instance_info_cache with network_info: [{"id": "79aa6a07-f43a-499b-9989-2017b35d1615", "address": "fa:16:3e:9d:9e:a9", "network": {"id": "620f277d-ca49-41da-83a0-afb8c393e26e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1223326239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdc479a290524130b9d17e627a64b65a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79aa6a07-f4", "ovs_interfaceid": "79aa6a07-f43a-499b-9989-2017b35d1615", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.967469] env[69328]: DEBUG oslo_vmware.api [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': task-3273377, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.036731] env[69328]: DEBUG nova.compute.manager [req-ff55d22c-d801-4def-95b6-5b667753bc83 req-29f16cb1-fc6d-4161-bc97-33cf515fb76a service nova] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Received event network-vif-plugged-79aa6a07-f43a-499b-9989-2017b35d1615 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 842.038276] env[69328]: DEBUG oslo_concurrency.lockutils [req-ff55d22c-d801-4def-95b6-5b667753bc83 req-29f16cb1-fc6d-4161-bc97-33cf515fb76a service nova] Acquiring lock "6ccd0715-0903-4fed-bf80-240f386e4ad8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 842.038789] env[69328]: DEBUG oslo_concurrency.lockutils [req-ff55d22c-d801-4def-95b6-5b667753bc83 req-29f16cb1-fc6d-4161-bc97-33cf515fb76a service nova] Lock "6ccd0715-0903-4fed-bf80-240f386e4ad8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 842.042121] env[69328]: DEBUG oslo_concurrency.lockutils [req-ff55d22c-d801-4def-95b6-5b667753bc83 req-29f16cb1-fc6d-4161-bc97-33cf515fb76a service nova] Lock "6ccd0715-0903-4fed-bf80-240f386e4ad8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 842.042409] env[69328]: DEBUG nova.compute.manager [req-ff55d22c-d801-4def-95b6-5b667753bc83 req-29f16cb1-fc6d-4161-bc97-33cf515fb76a service nova] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] No waiting events found dispatching network-vif-plugged-79aa6a07-f43a-499b-9989-2017b35d1615 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 842.042579] env[69328]: WARNING nova.compute.manager [req-ff55d22c-d801-4def-95b6-5b667753bc83 req-29f16cb1-fc6d-4161-bc97-33cf515fb76a service nova] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Received unexpected event network-vif-plugged-79aa6a07-f43a-499b-9989-2017b35d1615 for instance with vm_state building and task_state spawning. [ 842.042744] env[69328]: DEBUG nova.compute.manager [req-ff55d22c-d801-4def-95b6-5b667753bc83 req-29f16cb1-fc6d-4161-bc97-33cf515fb76a service nova] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Received event network-changed-79aa6a07-f43a-499b-9989-2017b35d1615 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 842.042919] env[69328]: DEBUG nova.compute.manager [req-ff55d22c-d801-4def-95b6-5b667753bc83 req-29f16cb1-fc6d-4161-bc97-33cf515fb76a service nova] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Refreshing instance network info cache due to event network-changed-79aa6a07-f43a-499b-9989-2017b35d1615. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 842.043156] env[69328]: DEBUG oslo_concurrency.lockutils [req-ff55d22c-d801-4def-95b6-5b667753bc83 req-29f16cb1-fc6d-4161-bc97-33cf515fb76a service nova] Acquiring lock "refresh_cache-6ccd0715-0903-4fed-bf80-240f386e4ad8" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.051250] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9cbdacf4-f698-4c2d-b07c-3d109d551393 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.253s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 842.053896] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.517s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 842.056552] env[69328]: INFO nova.compute.claims [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 842.093945] env[69328]: INFO nova.scheduler.client.report [None req-9cbdacf4-f698-4c2d-b07c-3d109d551393 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Deleted allocations for instance afa25f89-ccda-4b77-aaa1-a3b62b53d870 [ 842.259526] env[69328]: WARNING nova.compute.manager [None req-d43f8c96-3d11-438c-86df-2d641f9b6a9e tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Image not found during snapshot: nova.exception.ImageNotFound: Image 38cbc678-ce07-4326-af85-f08c2919b511 could not be found. [ 842.292654] env[69328]: DEBUG oslo_vmware.api [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273376, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.462154] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Releasing lock "refresh_cache-6ccd0715-0903-4fed-bf80-240f386e4ad8" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 842.462540] env[69328]: DEBUG nova.compute.manager [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Instance network_info: |[{"id": "79aa6a07-f43a-499b-9989-2017b35d1615", "address": "fa:16:3e:9d:9e:a9", "network": {"id": "620f277d-ca49-41da-83a0-afb8c393e26e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1223326239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdc479a290524130b9d17e627a64b65a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79aa6a07-f4", "ovs_interfaceid": "79aa6a07-f43a-499b-9989-2017b35d1615", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 842.462863] env[69328]: DEBUG oslo_vmware.api [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': task-3273377, 'name': PowerOffVM_Task, 'duration_secs': 0.296118} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.463134] env[69328]: DEBUG oslo_concurrency.lockutils [req-ff55d22c-d801-4def-95b6-5b667753bc83 req-29f16cb1-fc6d-4161-bc97-33cf515fb76a service nova] Acquired lock "refresh_cache-6ccd0715-0903-4fed-bf80-240f386e4ad8" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 842.463320] env[69328]: DEBUG nova.network.neutron [req-ff55d22c-d801-4def-95b6-5b667753bc83 req-29f16cb1-fc6d-4161-bc97-33cf515fb76a service nova] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Refreshing network info cache for port 79aa6a07-f43a-499b-9989-2017b35d1615 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 842.464940] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9d:9e:a9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '86a35d07-53d3-46b3-92cb-ae34236c0f41', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '79aa6a07-f43a-499b-9989-2017b35d1615', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 842.476044] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 842.476396] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 842.477319] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 842.478644] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 842.480877] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0cbc066-cbd8-46fc-9f75-bb6fd3ed3c96 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.486649] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-21aeef71-f265-4554-8752-5ae45a0bb4d6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.510767] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 
4a990411-16cd-4e53-9068-29654b69abe6] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 842.512039] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-20c61fa6-8edf-4c17-aaa0-6d242013d7b5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.515191] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 842.515191] env[69328]: value = "task-3273378" [ 842.515191] env[69328]: _type = "Task" [ 842.515191] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.528930] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273378, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.548318] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 842.548588] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 842.548729] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Deleting the datastore file [datastore2] 4a990411-16cd-4e53-9068-29654b69abe6 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 842.549103] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aca9c9a4-90f5-450d-83f7-9e059e2915d4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.559020] env[69328]: DEBUG oslo_vmware.api [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Waiting for the task: (returnval){ [ 842.559020] env[69328]: value = "task-3273380" [ 842.559020] env[69328]: _type = "Task" [ 842.559020] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.567390] env[69328]: DEBUG oslo_vmware.api [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': task-3273380, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.602413] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9cbdacf4-f698-4c2d-b07c-3d109d551393 tempest-ListServersNegativeTestJSON-612887248 tempest-ListServersNegativeTestJSON-612887248-project-member] Lock "afa25f89-ccda-4b77-aaa1-a3b62b53d870" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.055s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 842.715101] env[69328]: DEBUG oslo_concurrency.lockutils [None req-497f040b-0ba7-4797-927d-dee828c96775 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Acquiring lock "d045c9ca-71f9-411e-9048-71b36c32f4b2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 842.715101] env[69328]: DEBUG oslo_concurrency.lockutils [None req-497f040b-0ba7-4797-927d-dee828c96775 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Lock "d045c9ca-71f9-411e-9048-71b36c32f4b2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 842.715101] env[69328]: DEBUG oslo_concurrency.lockutils [None req-497f040b-0ba7-4797-927d-dee828c96775 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Acquiring lock "d045c9ca-71f9-411e-9048-71b36c32f4b2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 842.715101] env[69328]: DEBUG oslo_concurrency.lockutils [None req-497f040b-0ba7-4797-927d-dee828c96775 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Lock "d045c9ca-71f9-411e-9048-71b36c32f4b2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 842.715101] env[69328]: DEBUG oslo_concurrency.lockutils [None req-497f040b-0ba7-4797-927d-dee828c96775 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Lock "d045c9ca-71f9-411e-9048-71b36c32f4b2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 842.718131] env[69328]: INFO nova.compute.manager [None req-497f040b-0ba7-4797-927d-dee828c96775 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Terminating instance [ 842.791119] env[69328]: DEBUG oslo_vmware.api [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273376, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.706417} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.791610] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 36f6aab5-2774-402b-9db6-9912f2d5d473/36f6aab5-2774-402b-9db6-9912f2d5d473.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 842.794025] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 842.794025] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5ae28289-35af-4868-8116-b9c65dacad45 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.802964] env[69328]: DEBUG oslo_vmware.api [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 842.802964] env[69328]: value = "task-3273381" [ 842.802964] env[69328]: _type = "Task" [ 842.802964] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.813513] env[69328]: DEBUG oslo_vmware.api [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273381, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.025730] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273378, 'name': CreateVM_Task, 'duration_secs': 0.453919} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.025898] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 843.026617] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.026776] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 843.027106] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 843.027364] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c6395a1-4c43-4697-a5b6-535bafe4acce {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.032820] env[69328]: DEBUG oslo_vmware.api [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 843.032820] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ccfadc-909b-1f37-46dc-e1f64479e9d8" [ 843.032820] env[69328]: _type = "Task" [ 843.032820] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.041751] env[69328]: DEBUG oslo_vmware.api [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ccfadc-909b-1f37-46dc-e1f64479e9d8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.050998] env[69328]: DEBUG nova.network.neutron [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Port 32db9785-1822-4acf-9971-06db92f35c18 binding to destination host cpu-1 is already ACTIVE {{(pid=69328) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 843.051412] env[69328]: DEBUG oslo_concurrency.lockutils [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquiring lock "refresh_cache-25fb207b-9388-4198-bb48-ab7cebd43375" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.051640] env[69328]: DEBUG oslo_concurrency.lockutils [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquired lock "refresh_cache-25fb207b-9388-4198-bb48-ab7cebd43375" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 843.051872] env[69328]: DEBUG nova.network.neutron [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 843.071763] env[69328]: DEBUG oslo_vmware.api [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': task-3273380, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.290808} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.072273] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 843.072465] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 843.072640] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 843.222945] env[69328]: DEBUG nova.compute.manager [None req-497f040b-0ba7-4797-927d-dee828c96775 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 843.223377] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-497f040b-0ba7-4797-927d-dee828c96775 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 843.228017] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0150f7a-5193-430a-b2c3-ea23ae4f47d2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.233943] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-497f040b-0ba7-4797-927d-dee828c96775 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 843.234285] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fc5e71f3-6aed-4a1b-a920-9969bbf9e4d4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.245137] env[69328]: DEBUG oslo_vmware.api [None req-497f040b-0ba7-4797-927d-dee828c96775 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Waiting for the task: (returnval){ [ 843.245137] env[69328]: value = "task-3273382" [ 843.245137] env[69328]: _type = "Task" [ 843.245137] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.256042] env[69328]: DEBUG oslo_vmware.api [None req-497f040b-0ba7-4797-927d-dee828c96775 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': task-3273382, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.315807] env[69328]: DEBUG oslo_vmware.api [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273381, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.099348} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.316141] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 843.316951] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c69fdc92-99b2-4189-96bd-bb82be71e8ec {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.349820] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] 36f6aab5-2774-402b-9db6-9912f2d5d473/36f6aab5-2774-402b-9db6-9912f2d5d473.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 843.351654] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4d0961c1-0159-4e4c-be8d-07e84a5b5aa5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.376039] env[69328]: DEBUG oslo_vmware.api [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 843.376039] env[69328]: value = "task-3273383" [ 843.376039] env[69328]: _type = "Task" [ 843.376039] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.385523] env[69328]: DEBUG oslo_vmware.api [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273383, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.407931] env[69328]: DEBUG oslo_concurrency.lockutils [None req-19a2c12f-c0de-4685-897b-3e7ed6f2c4f1 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Acquiring lock "3daf7b73-5679-47ce-b847-f3786f1000d4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 843.408203] env[69328]: DEBUG oslo_concurrency.lockutils [None req-19a2c12f-c0de-4685-897b-3e7ed6f2c4f1 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Lock "3daf7b73-5679-47ce-b847-f3786f1000d4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 843.408785] env[69328]: DEBUG oslo_concurrency.lockutils [None req-19a2c12f-c0de-4685-897b-3e7ed6f2c4f1 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Acquiring lock "3daf7b73-5679-47ce-b847-f3786f1000d4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 843.408785] env[69328]: DEBUG oslo_concurrency.lockutils [None req-19a2c12f-c0de-4685-897b-3e7ed6f2c4f1 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Lock "3daf7b73-5679-47ce-b847-f3786f1000d4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 843.408785] env[69328]: DEBUG oslo_concurrency.lockutils [None req-19a2c12f-c0de-4685-897b-3e7ed6f2c4f1 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Lock "3daf7b73-5679-47ce-b847-f3786f1000d4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 843.412170] env[69328]: INFO nova.compute.manager [None req-19a2c12f-c0de-4685-897b-3e7ed6f2c4f1 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Terminating instance [ 843.533411] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e92d9f8b-8402-4c82-9895-962f56214690 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.556277] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6e1b061-c40f-4961-adfb-9b10d504c363 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.563324] env[69328]: DEBUG oslo_vmware.api [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': 
session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ccfadc-909b-1f37-46dc-e1f64479e9d8, 'name': SearchDatastore_Task, 'duration_secs': 0.03682} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.564208] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 843.564533] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 843.564946] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.564996] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 843.565220] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 843.566423] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ab575757-37c3-42aa-9c30-d410ec093deb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.606086] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1436c3a6-4ba5-40ca-bbeb-f44bc38a2a8d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.609969] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 843.609969] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 843.609969] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-596eb2e4-1540-4de6-b944-3c849db4631e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.616365] env[69328]: DEBUG oslo_vmware.api [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 843.616365] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52bdecaf-7563-b643-474e-8e516476c386" [ 843.616365] env[69328]: _type = "Task" [ 843.616365] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.625094] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38d77fbd-d08e-4963-828c-f708c7e384aa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.636816] env[69328]: DEBUG oslo_vmware.api [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52bdecaf-7563-b643-474e-8e516476c386, 'name': SearchDatastore_Task, 'duration_secs': 0.016422} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.647320] env[69328]: DEBUG nova.compute.provider_tree [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 843.649466] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a7fe6f3-8a8a-4bf0-abb4-8a40b91c7b85 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.660356] env[69328]: DEBUG oslo_vmware.api [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 843.660356] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52accaa9-7f26-7f2d-1577-ac22ce111b40" [ 843.660356] env[69328]: _type = "Task" [ 843.660356] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.669477] env[69328]: DEBUG nova.network.neutron [req-ff55d22c-d801-4def-95b6-5b667753bc83 req-29f16cb1-fc6d-4161-bc97-33cf515fb76a service nova] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Updated VIF entry in instance network info cache for port 79aa6a07-f43a-499b-9989-2017b35d1615. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 843.669837] env[69328]: DEBUG nova.network.neutron [req-ff55d22c-d801-4def-95b6-5b667753bc83 req-29f16cb1-fc6d-4161-bc97-33cf515fb76a service nova] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Updating instance_info_cache with network_info: [{"id": "79aa6a07-f43a-499b-9989-2017b35d1615", "address": "fa:16:3e:9d:9e:a9", "network": {"id": "620f277d-ca49-41da-83a0-afb8c393e26e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1223326239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdc479a290524130b9d17e627a64b65a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79aa6a07-f4", "ovs_interfaceid": "79aa6a07-f43a-499b-9989-2017b35d1615", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.676142] env[69328]: DEBUG oslo_vmware.api [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52accaa9-7f26-7f2d-1577-ac22ce111b40, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.757815] env[69328]: DEBUG oslo_vmware.api [None req-497f040b-0ba7-4797-927d-dee828c96775 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': task-3273382, 'name': PowerOffVM_Task, 'duration_secs': 0.373342} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.758142] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-497f040b-0ba7-4797-927d-dee828c96775 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 843.758306] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-497f040b-0ba7-4797-927d-dee828c96775 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 843.758630] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e7080016-0926-4e88-aa96-a2916b53fccc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.868497] env[69328]: DEBUG nova.network.neutron [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Updating instance_info_cache with network_info: [{"id": "32db9785-1822-4acf-9971-06db92f35c18", "address": "fa:16:3e:3e:aa:b8", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.119", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32db9785-18", "ovs_interfaceid": "32db9785-1822-4acf-9971-06db92f35c18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.888667] env[69328]: DEBUG oslo_vmware.api [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273383, 'name': ReconfigVM_Task, 'duration_secs': 0.465236} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.889029] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Reconfigured VM instance instance-0000003b to attach disk [datastore1] 36f6aab5-2774-402b-9db6-9912f2d5d473/36f6aab5-2774-402b-9db6-9912f2d5d473.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 843.889717] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3b3a7f3f-4430-4ec3-9423-f49d975a961a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.897903] env[69328]: DEBUG oslo_vmware.api [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 843.897903] env[69328]: value = "task-3273385" [ 843.897903] env[69328]: _type = "Task" [ 843.897903] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.908880] env[69328]: DEBUG oslo_vmware.api [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273385, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.918274] env[69328]: DEBUG nova.compute.manager [None req-19a2c12f-c0de-4685-897b-3e7ed6f2c4f1 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 843.918274] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-19a2c12f-c0de-4685-897b-3e7ed6f2c4f1 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 843.918274] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a114762-c0bc-4c79-9271-c3808b5a8ca5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.923747] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-497f040b-0ba7-4797-927d-dee828c96775 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 843.924105] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-497f040b-0ba7-4797-927d-dee828c96775 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 843.924378] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-497f040b-0ba7-4797-927d-dee828c96775 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Deleting the datastore file [datastore1] d045c9ca-71f9-411e-9048-71b36c32f4b2 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 843.925678] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-43442afe-d5bb-45d0-b881-dfa68944efca {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.932113] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-19a2c12f-c0de-4685-897b-3e7ed6f2c4f1 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 843.932472] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-af230f90-13f0-4f9a-a9ef-681cd0d7609a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.935218] env[69328]: DEBUG oslo_vmware.api [None req-497f040b-0ba7-4797-927d-dee828c96775 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Waiting for the task: (returnval){ [ 843.935218] env[69328]: value = "task-3273386" [ 843.935218] env[69328]: _type = "Task" [ 843.935218] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.940307] env[69328]: DEBUG oslo_vmware.api [None req-19a2c12f-c0de-4685-897b-3e7ed6f2c4f1 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for the task: (returnval){ [ 843.940307] env[69328]: value = "task-3273387" [ 843.940307] env[69328]: _type = "Task" [ 843.940307] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.947353] env[69328]: DEBUG oslo_vmware.api [None req-497f040b-0ba7-4797-927d-dee828c96775 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': task-3273386, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.953036] env[69328]: DEBUG oslo_vmware.api [None req-19a2c12f-c0de-4685-897b-3e7ed6f2c4f1 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273387, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.959893] env[69328]: DEBUG oslo_concurrency.lockutils [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Acquiring lock "5a45bd6a-b063-4104-a85a-d78a4bb9452e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 843.960349] env[69328]: DEBUG oslo_concurrency.lockutils [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Lock "5a45bd6a-b063-4104-a85a-d78a4bb9452e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 844.138790] env[69328]: DEBUG nova.virt.hardware [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 844.138790] env[69328]: DEBUG nova.virt.hardware [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 
tempest-ServersListShow296Test-1109314957-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 844.139058] env[69328]: DEBUG nova.virt.hardware [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 844.139378] env[69328]: DEBUG nova.virt.hardware [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 844.139547] env[69328]: DEBUG nova.virt.hardware [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 844.139697] env[69328]: DEBUG nova.virt.hardware [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 844.139961] env[69328]: DEBUG nova.virt.hardware [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 844.140077] env[69328]: DEBUG nova.virt.hardware [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 844.140252] env[69328]: DEBUG nova.virt.hardware [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 844.140411] env[69328]: DEBUG nova.virt.hardware [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 844.140584] env[69328]: DEBUG nova.virt.hardware [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 844.141506] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3df0a79-de82-4292-bcb5-05d08682119e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.151402] env[69328]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cdf6b3d-b924-410e-aa43-0653f17ccba5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.156877] env[69328]: DEBUG nova.scheduler.client.report [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 844.175358] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Instance VIF info [] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 844.181863] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 844.183625] env[69328]: DEBUG oslo_concurrency.lockutils [req-ff55d22c-d801-4def-95b6-5b667753bc83 req-29f16cb1-fc6d-4161-bc97-33cf515fb76a service nova] Releasing lock "refresh_cache-6ccd0715-0903-4fed-bf80-240f386e4ad8" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 844.184057] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 844.184915] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d331474a-4312-4d5d-b85d-e08fa5f01a57 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.202674] env[69328]: DEBUG oslo_vmware.api [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52accaa9-7f26-7f2d-1577-ac22ce111b40, 'name': SearchDatastore_Task, 'duration_secs': 0.018587} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.206334] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 844.206334] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 6ccd0715-0903-4fed-bf80-240f386e4ad8/6ccd0715-0903-4fed-bf80-240f386e4ad8.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 844.206334] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5cfbff32-798d-4aa2-8640-9754bead68e2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.210437] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 844.210437] env[69328]: value = "task-3273388" [ 844.210437] env[69328]: _type = "Task" [ 844.210437] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.216661] env[69328]: DEBUG oslo_vmware.api [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 844.216661] env[69328]: value = "task-3273389" [ 844.216661] env[69328]: _type = "Task" [ 844.216661] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.224633] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273388, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.236295] env[69328]: DEBUG oslo_vmware.api [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273389, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.376562] env[69328]: DEBUG oslo_concurrency.lockutils [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Releasing lock "refresh_cache-25fb207b-9388-4198-bb48-ab7cebd43375" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 844.412992] env[69328]: DEBUG oslo_vmware.api [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273385, 'name': Rename_Task, 'duration_secs': 0.220622} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.414029] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 844.414029] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-22b07614-8891-4444-af80-45b0ad88eeb6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.423248] env[69328]: DEBUG oslo_vmware.api [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 844.423248] env[69328]: value = "task-3273390" [ 844.423248] env[69328]: _type = "Task" [ 844.423248] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.435632] env[69328]: DEBUG oslo_vmware.api [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273390, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.454998] env[69328]: DEBUG oslo_vmware.api [None req-19a2c12f-c0de-4685-897b-3e7ed6f2c4f1 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273387, 'name': PowerOffVM_Task, 'duration_secs': 0.235948} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.455785] env[69328]: DEBUG oslo_vmware.api [None req-497f040b-0ba7-4797-927d-dee828c96775 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': task-3273386, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.324231} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.455957] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-19a2c12f-c0de-4685-897b-3e7ed6f2c4f1 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 844.456830] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-19a2c12f-c0de-4685-897b-3e7ed6f2c4f1 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 844.457123] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-497f040b-0ba7-4797-927d-dee828c96775 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 844.457321] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-497f040b-0ba7-4797-927d-dee828c96775 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 844.457536] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-497f040b-0ba7-4797-927d-dee828c96775 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 844.457717] env[69328]: INFO nova.compute.manager [None req-497f040b-0ba7-4797-927d-dee828c96775 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Took 1.23 seconds to destroy the instance on the hypervisor. [ 844.457970] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-497f040b-0ba7-4797-927d-dee828c96775 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 844.458207] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ddacd4d6-62d7-499e-98ec-6d52e1a04588 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.460050] env[69328]: DEBUG nova.compute.manager [-] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 844.460159] env[69328]: DEBUG nova.network.neutron [-] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 844.462466] env[69328]: DEBUG nova.compute.manager [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 844.583155] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-19a2c12f-c0de-4685-897b-3e7ed6f2c4f1 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 844.583480] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-19a2c12f-c0de-4685-897b-3e7ed6f2c4f1 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 844.583643] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-19a2c12f-c0de-4685-897b-3e7ed6f2c4f1 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Deleting the datastore file [datastore2] 3daf7b73-5679-47ce-b847-f3786f1000d4 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 844.583936] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3265ec11-2900-4f40-8747-255a1b4f9c52 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.595674] env[69328]: DEBUG oslo_vmware.api [None req-19a2c12f-c0de-4685-897b-3e7ed6f2c4f1 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for the task: (returnval){ [ 844.595674] env[69328]: value = "task-3273392" [ 844.595674] env[69328]: _type = "Task" [ 844.595674] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.606741] env[69328]: DEBUG oslo_vmware.api [None req-19a2c12f-c0de-4685-897b-3e7ed6f2c4f1 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273392, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.663198] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.609s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 844.663792] env[69328]: DEBUG nova.compute.manager [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 844.669696] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 35.151s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 844.669910] env[69328]: DEBUG nova.objects.instance [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69328) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 844.730783] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273388, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.734317] env[69328]: DEBUG oslo_vmware.api [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273389, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.878658] env[69328]: DEBUG nova.compute.manager [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=69328) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 844.879043] env[69328]: DEBUG oslo_concurrency.lockutils [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 844.936020] env[69328]: DEBUG oslo_vmware.api [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273390, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.998585] env[69328]: DEBUG oslo_concurrency.lockutils [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 845.049752] env[69328]: DEBUG nova.compute.manager [req-49dbfc06-1cb2-41c9-98cf-a6c8ebf18268 req-46aaa5fc-931b-4bdc-8319-57d27c5b53b8 service nova] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Received event network-vif-deleted-cf26672a-3aeb-4534-8776-36a45511c5b4 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 845.049921] env[69328]: INFO nova.compute.manager [req-49dbfc06-1cb2-41c9-98cf-a6c8ebf18268 req-46aaa5fc-931b-4bdc-8319-57d27c5b53b8 service nova] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Neutron deleted interface cf26672a-3aeb-4534-8776-36a45511c5b4; detaching it from the instance and deleting it from the info cache [ 845.050210] env[69328]: DEBUG nova.network.neutron [req-49dbfc06-1cb2-41c9-98cf-a6c8ebf18268 req-46aaa5fc-931b-4bdc-8319-57d27c5b53b8 service nova] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Updating instance_info_cache with network_info: [{"id": "f3f139e6-24e0-47d6-8700-cdcaec9d0b1b", "address": "fa:16:3e:f4:9e:82", "network": {"id": "dd10b920-6fde-40af-85b6-0d58265d3255", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1828541594", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.161", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "f71a072b33154efe9636b50e25f93381", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e365f3b9-706b-4fa2-8f95-ae51b35ab011", "external-id": "nsx-vlan-transportzone-154", "segmentation_id": 154, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3f139e6-24", "ovs_interfaceid": "f3f139e6-24e0-47d6-8700-cdcaec9d0b1b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "046427f3-7078-436b-be68-5df86aa70395", "address": "fa:16:3e:0a:43:8a", "network": {"id": "f42dac65-89fd-461c-a160-43aaa738a5e3", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-710236275", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.130", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f71a072b33154efe9636b50e25f93381", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bd3c6b64-aba2-4bdc-a693-3b4dff3ed861", "external-id": "nsx-vlan-transportzone-600", "segmentation_id": 600, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap046427f3-70", "ovs_interfaceid": "046427f3-7078-436b-be68-5df86aa70395", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.106137] env[69328]: DEBUG oslo_vmware.api [None req-19a2c12f-c0de-4685-897b-3e7ed6f2c4f1 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273392, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.500013} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.106137] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-19a2c12f-c0de-4685-897b-3e7ed6f2c4f1 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 845.106473] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-19a2c12f-c0de-4685-897b-3e7ed6f2c4f1 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 845.106756] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-19a2c12f-c0de-4685-897b-3e7ed6f2c4f1 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 845.107057] env[69328]: INFO nova.compute.manager [None req-19a2c12f-c0de-4685-897b-3e7ed6f2c4f1 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Took 1.19 seconds to destroy the instance on the hypervisor. [ 845.107401] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-19a2c12f-c0de-4685-897b-3e7ed6f2c4f1 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 845.108054] env[69328]: DEBUG nova.compute.manager [-] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 845.108253] env[69328]: DEBUG nova.network.neutron [-] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 845.176111] env[69328]: DEBUG nova.compute.utils [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 845.181447] env[69328]: DEBUG nova.compute.manager [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Not allocating networking since 'none' was specified. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 845.235239] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273388, 'name': CreateVM_Task, 'duration_secs': 0.534725} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.240204] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 845.240595] env[69328]: DEBUG oslo_vmware.api [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273389, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.6381} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.241059] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.241342] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 845.241900] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 845.242507] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 6ccd0715-0903-4fed-bf80-240f386e4ad8/6ccd0715-0903-4fed-bf80-240f386e4ad8.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 845.242695] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 845.243052] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84cabfd0-0cfd-4ca3-98bd-52ba07299293 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.245307] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4a0014e2-b169-4ac3-871e-3ca8c2404e17 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.253654] env[69328]: DEBUG oslo_vmware.api [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 845.253654] env[69328]: value = "task-3273393" [ 845.253654] env[69328]: _type = "Task" [ 845.253654] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.255540] env[69328]: DEBUG oslo_vmware.api [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Waiting for the task: (returnval){ [ 845.255540] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5238c22e-b680-c00c-acbf-e64d2c38f8e2" [ 845.255540] env[69328]: _type = "Task" [ 845.255540] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.272058] env[69328]: DEBUG oslo_vmware.api [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5238c22e-b680-c00c-acbf-e64d2c38f8e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.275937] env[69328]: DEBUG oslo_vmware.api [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273393, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.436018] env[69328]: DEBUG oslo_vmware.api [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273390, 'name': PowerOnVM_Task, 'duration_secs': 0.832369} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.436018] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 845.436018] env[69328]: INFO nova.compute.manager [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Took 8.45 seconds to spawn the instance on the hypervisor. 
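The "Waiting for the task", "progress is N%", and "completed successfully" records above all come from the oslo.vmware task-polling loop: a vim call such as PowerOnVM_Task or CopyVirtualDisk_Task returns a Task managed-object reference (task-3273390, task-3273389, ...), and the caller then blocks in wait_for_task, which re-reads the task state at the configured interval and logs the progress lines. A minimal sketch of that pattern, assuming hypothetical endpoint details and using the public oslo.vmware session API rather than the Nova driver's internal wrappers:

    from oslo_vmware import api as vmware_api

    # Hypothetical vCenter endpoint and credentials; the keyword arguments
    # mirror the retry count and poll interval the compute driver configures.
    session = vmware_api.VMwareAPISession(
        'vc1.example.test', 'admin', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    def power_on(vm_ref):
        # The vim call returns a Task moref immediately, without waiting.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task polls the task until it completes or fails, emitting
        # the "progress is N%" DEBUG lines seen in this log, and returns the
        # final task info (or raises if the task errored).
        return session.wait_for_task(task)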
[ 845.436018] env[69328]: DEBUG nova.compute.manager [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 845.436946] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87cfe31d-5f63-4891-b55b-de8fc3b106cb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.556291] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b5f34769-01d2-4de3-a3e8-a50edc9719bd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.565760] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-511e536d-2d31-4c44-8371-8013f9fb9768 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.579396] env[69328]: DEBUG nova.compute.manager [req-51527803-6697-4c2e-812d-27aad17c11b4 req-362549d0-c390-44de-bda5-b05e3d2acfec service nova] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Received event network-vif-deleted-217c955e-5f6e-4245-be2d-e3bb84c2917f {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 845.579568] env[69328]: INFO nova.compute.manager [req-51527803-6697-4c2e-812d-27aad17c11b4 req-362549d0-c390-44de-bda5-b05e3d2acfec service nova] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Neutron deleted interface 217c955e-5f6e-4245-be2d-e3bb84c2917f; detaching it from the instance and deleting it from the info cache [ 845.579805] env[69328]: DEBUG nova.network.neutron [req-51527803-6697-4c2e-812d-27aad17c11b4 req-362549d0-c390-44de-bda5-b05e3d2acfec service nova] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.593911] env[69328]: DEBUG oslo_vmware.rw_handles [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528fd672-9993-43e8-22c4-5536b5a4af45/disk-0.vmdk. {{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 845.594856] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9015096-e28b-4587-a700-848afb698333 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.611702] env[69328]: DEBUG nova.compute.manager [req-49dbfc06-1cb2-41c9-98cf-a6c8ebf18268 req-46aaa5fc-931b-4bdc-8319-57d27c5b53b8 service nova] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Detach interface failed, port_id=cf26672a-3aeb-4534-8776-36a45511c5b4, reason: Instance d045c9ca-71f9-411e-9048-71b36c32f4b2 could not be found. 
{{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 845.618053] env[69328]: DEBUG oslo_vmware.rw_handles [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528fd672-9993-43e8-22c4-5536b5a4af45/disk-0.vmdk is in state: ready. {{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 845.618053] env[69328]: ERROR oslo_vmware.rw_handles [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528fd672-9993-43e8-22c4-5536b5a4af45/disk-0.vmdk due to incomplete transfer. [ 845.618053] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-c6248a07-a6cf-4274-b216-494f9285d74d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.627068] env[69328]: DEBUG oslo_vmware.rw_handles [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528fd672-9993-43e8-22c4-5536b5a4af45/disk-0.vmdk. {{(pid=69328) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 845.627219] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Uploaded image f10da8bc-00a0-4667-ab61-f221ad2fbf2e to the Glance image server {{(pid=69328) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 845.629799] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Destroying the VM {{(pid=69328) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 845.631272] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ba494401-c99f-4c78-ae5e-c9e5b2188737 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.641044] env[69328]: DEBUG oslo_vmware.api [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 845.641044] env[69328]: value = "task-3273394" [ 845.641044] env[69328]: _type = "Task" [ 845.641044] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.663076] env[69328]: DEBUG oslo_vmware.api [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273394, 'name': Destroy_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.682365] env[69328]: DEBUG nova.compute.manager [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 845.685778] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c48a1829-cd4c-4aa3-9478-ba37cbcd6b54 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 845.687493] env[69328]: DEBUG oslo_concurrency.lockutils [None req-03b5f61e-b980-4d3c-9522-27aa6b2948a0 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.147s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 845.688223] env[69328]: DEBUG nova.objects.instance [None req-03b5f61e-b980-4d3c-9522-27aa6b2948a0 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lazy-loading 'resources' on Instance uuid d10bee67-6294-4537-9ce7-4eedb8361ddc {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 845.768038] env[69328]: DEBUG oslo_vmware.api [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273393, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.108764} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.768619] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 845.769878] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d9799e4-403c-4781-bc91-3332c403f408 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.775815] env[69328]: DEBUG oslo_vmware.api [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5238c22e-b680-c00c-acbf-e64d2c38f8e2, 'name': SearchDatastore_Task, 'duration_secs': 0.018542} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.777298] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 845.777549] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 845.777789] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.777972] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 845.778214] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 845.788110] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-715a1e88-9b10-4d71-b649-171dd2df217b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.798349] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Reconfiguring VM instance instance-0000003c to attach disk [datastore2] 6ccd0715-0903-4fed-bf80-240f386e4ad8/6ccd0715-0903-4fed-bf80-240f386e4ad8.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 845.798629] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-58209580-9b61-4b03-8961-d40640c9fdf0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.819312] env[69328]: DEBUG oslo_vmware.api [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 845.819312] env[69328]: value = "task-3273395" [ 845.819312] env[69328]: _type = "Task" [ 845.819312] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.825777] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 845.825959] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 845.829614] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac4d005d-feaa-48d0-974f-c726d3dccf56 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.831875] env[69328]: DEBUG oslo_vmware.api [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273395, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.837065] env[69328]: DEBUG oslo_vmware.api [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Waiting for the task: (returnval){ [ 845.837065] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f9c613-f973-a8e1-27ec-6a86c134000a" [ 845.837065] env[69328]: _type = "Task" [ 845.837065] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.844505] env[69328]: DEBUG oslo_vmware.api [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f9c613-f973-a8e1-27ec-6a86c134000a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.955978] env[69328]: INFO nova.compute.manager [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Took 46.91 seconds to build instance. 
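The lock bookkeeping records in this stretch ('Acquiring lock "compute_resources" by ...', 'acquired ... :: waited 35.151s', '"released" ... :: held 1.016s') are emitted by the inner wrapper in oslo_concurrency/lockutils.py (the lockutils.py:405/410/424 locations cited above). A minimal sketch of how such a serialized section is declared, with a hypothetical stand-in method; the real ResourceTracker differs in detail but produces the same acquire/wait/hold accounting:

    from oslo_concurrency import lockutils

    class ResourceTracker(object):
        # Stand-in for nova.compute.resource_tracker.ResourceTracker: the
        # claim-related methods are all funnelled through one named lock.
        @lockutils.synchronized('compute_resources')
        def instance_claim(self, instance_uuid):
            # While one thread is in here, competing callers log
            # 'Acquiring lock "compute_resources" ...' and then, once they
            # get in, 'acquired ... :: waited N.NNNs'; the wrapper logs the
            # '"released" ... :: held N.NNNs' line when this method returns.
            return {'claimed': instance_uuid}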
[ 845.983398] env[69328]: DEBUG nova.network.neutron [-] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.083367] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c768b8c0-8f5c-481a-9fd6-be57aba0b7ee {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.094645] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ec3b0e4-6efb-4f9d-a3b8-6768a0da1472 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.130978] env[69328]: DEBUG nova.compute.manager [req-51527803-6697-4c2e-812d-27aad17c11b4 req-362549d0-c390-44de-bda5-b05e3d2acfec service nova] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Detach interface failed, port_id=217c955e-5f6e-4245-be2d-e3bb84c2917f, reason: Instance 3daf7b73-5679-47ce-b847-f3786f1000d4 could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 846.151681] env[69328]: DEBUG oslo_vmware.api [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273394, 'name': Destroy_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.176226] env[69328]: DEBUG nova.network.neutron [-] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.331904] env[69328]: DEBUG oslo_vmware.api [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273395, 'name': ReconfigVM_Task, 'duration_secs': 0.441292} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.333263] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Reconfigured VM instance instance-0000003c to attach disk [datastore2] 6ccd0715-0903-4fed-bf80-240f386e4ad8/6ccd0715-0903-4fed-bf80-240f386e4ad8.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 846.338983] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-79149f14-9666-4875-8150-44c1ba2d7d52 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.350902] env[69328]: DEBUG oslo_vmware.api [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f9c613-f973-a8e1-27ec-6a86c134000a, 'name': SearchDatastore_Task, 'duration_secs': 0.03068} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.353230] env[69328]: DEBUG oslo_vmware.api [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 846.353230] env[69328]: value = "task-3273396" [ 846.353230] env[69328]: _type = "Task" [ 846.353230] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.353471] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17e36c26-45fa-481e-aafe-d8af6faf8126 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.366010] env[69328]: DEBUG oslo_vmware.api [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Waiting for the task: (returnval){ [ 846.366010] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52dcacc8-1f0a-5207-956c-2a513f19dfcf" [ 846.366010] env[69328]: _type = "Task" [ 846.366010] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.370318] env[69328]: DEBUG oslo_vmware.api [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273396, 'name': Rename_Task} progress is 10%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.379797] env[69328]: DEBUG oslo_vmware.api [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52dcacc8-1f0a-5207-956c-2a513f19dfcf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.458863] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e87684df-a6c8-46a8-8aa7-bfe10f3a2d2b tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "36f6aab5-2774-402b-9db6-9912f2d5d473" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.366s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 846.485907] env[69328]: INFO nova.compute.manager [-] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Took 1.38 seconds to deallocate network for instance. [ 846.664179] env[69328]: DEBUG oslo_vmware.api [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273394, 'name': Destroy_Task, 'duration_secs': 0.963284} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.664179] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Destroyed the VM [ 846.664320] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Deleting Snapshot of the VM instance {{(pid=69328) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 846.664835] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-aacc9687-b92d-407f-94b4-300daf9f816a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.676993] env[69328]: DEBUG oslo_vmware.api [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 846.676993] env[69328]: value = "task-3273397" [ 846.676993] env[69328]: _type = "Task" [ 846.676993] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.685478] env[69328]: INFO nova.compute.manager [-] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Took 2.23 seconds to deallocate network for instance. [ 846.694448] env[69328]: DEBUG oslo_vmware.api [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273397, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.702491] env[69328]: DEBUG nova.compute.manager [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 846.730864] env[69328]: DEBUG nova.virt.hardware [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 846.731134] env[69328]: DEBUG nova.virt.hardware [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 846.731266] env[69328]: DEBUG nova.virt.hardware [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 846.731401] env[69328]: DEBUG nova.virt.hardware [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 846.731576] env[69328]: DEBUG nova.virt.hardware [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 846.731726] env[69328]: DEBUG nova.virt.hardware [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 846.731932] env[69328]: DEBUG nova.virt.hardware [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 846.732119] env[69328]: DEBUG nova.virt.hardware [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 846.732654] env[69328]: DEBUG nova.virt.hardware [None 
req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 846.732654] env[69328]: DEBUG nova.virt.hardware [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 846.732654] env[69328]: DEBUG nova.virt.hardware [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 846.733504] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47bcc385-7f46-416b-a75b-527be37af70d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.738252] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bf4413f-0d55-4fb0-90e6-375b3af8c190 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.748829] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22971316-19c0-43b7-bada-fbe102c0fbe4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.753632] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fda3d46-89d1-4757-9943-2a0857ff8a49 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.794284] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Instance VIF info [] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 846.799929] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Creating folder: Project (b73c20c149bd4be890eab58e0a3bb4bd). Parent ref: group-v653649. 
{{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 846.800819] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4bca63b1-721a-4b97-ab7d-db813e792f53 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.803200] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b1ff658-d985-4d01-9063-85a4f9adb80c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.813097] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6e8affb-45e8-4c93-ba08-f8eff4733e00 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.817338] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1bbabe5-bbef-4ef3-b7b0-228e0c3f9062 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "62fa6807-f67d-4bf5-ba23-9e97f9da120e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 846.817690] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1bbabe5-bbef-4ef3-b7b0-228e0c3f9062 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "62fa6807-f67d-4bf5-ba23-9e97f9da120e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 846.819769] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1bbabe5-bbef-4ef3-b7b0-228e0c3f9062 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "62fa6807-f67d-4bf5-ba23-9e97f9da120e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 846.819769] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1bbabe5-bbef-4ef3-b7b0-228e0c3f9062 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "62fa6807-f67d-4bf5-ba23-9e97f9da120e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 846.819769] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1bbabe5-bbef-4ef3-b7b0-228e0c3f9062 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "62fa6807-f67d-4bf5-ba23-9e97f9da120e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 846.822277] env[69328]: INFO nova.compute.manager [None req-f1bbabe5-bbef-4ef3-b7b0-228e0c3f9062 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Terminating instance [ 846.831584] env[69328]: DEBUG nova.compute.provider_tree [None req-03b5f61e-b980-4d3c-9522-27aa6b2948a0 tempest-ServerDiskConfigTestJSON-214284543 
tempest-ServerDiskConfigTestJSON-214284543-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 846.835253] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Created folder: Project (b73c20c149bd4be890eab58e0a3bb4bd) in parent group-v653649. [ 846.835253] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Creating folder: Instances. Parent ref: group-v653826. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 846.835437] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c5bc43c1-3177-4c8b-ac01-9c48541fd3c9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.848589] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Created folder: Instances in parent group-v653826. [ 846.848893] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 846.849115] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 846.849335] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7a023e93-f96c-4d78-bd75-a3f857b6907f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.870996] env[69328]: DEBUG oslo_vmware.api [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273396, 'name': Rename_Task, 'duration_secs': 0.163519} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.875184] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 846.875427] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 846.875427] env[69328]: value = "task-3273400" [ 846.875427] env[69328]: _type = "Task" [ 846.875427] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.875842] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0a947cd8-4077-42bf-a36d-4a6b22bd8f36 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.884691] env[69328]: DEBUG oslo_vmware.api [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52dcacc8-1f0a-5207-956c-2a513f19dfcf, 'name': SearchDatastore_Task, 'duration_secs': 0.022095} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.885178] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 846.885469] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 4a990411-16cd-4e53-9068-29654b69abe6/4a990411-16cd-4e53-9068-29654b69abe6.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 846.885772] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9ee4a633-75af-4a1f-9b9b-2ffda4a7da32 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.891289] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273400, 'name': CreateVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.893260] env[69328]: DEBUG oslo_vmware.api [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 846.893260] env[69328]: value = "task-3273401" [ 846.893260] env[69328]: _type = "Task" [ 846.893260] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.899029] env[69328]: DEBUG oslo_vmware.api [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Waiting for the task: (returnval){ [ 846.899029] env[69328]: value = "task-3273402" [ 846.899029] env[69328]: _type = "Task" [ 846.899029] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.902185] env[69328]: DEBUG oslo_vmware.api [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273401, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.914029] env[69328]: DEBUG oslo_vmware.api [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': task-3273402, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.006954] env[69328]: DEBUG oslo_concurrency.lockutils [None req-19a2c12f-c0de-4685-897b-3e7ed6f2c4f1 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 847.191478] env[69328]: DEBUG oslo_vmware.api [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273397, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.198514] env[69328]: DEBUG oslo_concurrency.lockutils [None req-497f040b-0ba7-4797-927d-dee828c96775 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 847.298112] env[69328]: DEBUG nova.compute.manager [req-47a744ed-0d51-4e3a-b8b5-a75755279965 req-4c74228a-ef43-4566-91f3-5117a6d13baf service nova] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Received event network-vif-deleted-f3f139e6-24e0-47d6-8700-cdcaec9d0b1b {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 847.298436] env[69328]: DEBUG nova.compute.manager [req-47a744ed-0d51-4e3a-b8b5-a75755279965 req-4c74228a-ef43-4566-91f3-5117a6d13baf service nova] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Received event network-vif-deleted-046427f3-7078-436b-be68-5df86aa70395 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 847.298554] env[69328]: DEBUG nova.compute.manager [req-47a744ed-0d51-4e3a-b8b5-a75755279965 req-4c74228a-ef43-4566-91f3-5117a6d13baf service nova] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Received event network-changed-766304d2-5559-4007-9fa4-a01027d56e49 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 847.298715] env[69328]: DEBUG nova.compute.manager [req-47a744ed-0d51-4e3a-b8b5-a75755279965 req-4c74228a-ef43-4566-91f3-5117a6d13baf service nova] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Refreshing instance network info cache due to event network-changed-766304d2-5559-4007-9fa4-a01027d56e49. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 847.298976] env[69328]: DEBUG oslo_concurrency.lockutils [req-47a744ed-0d51-4e3a-b8b5-a75755279965 req-4c74228a-ef43-4566-91f3-5117a6d13baf service nova] Acquiring lock "refresh_cache-36f6aab5-2774-402b-9db6-9912f2d5d473" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.299140] env[69328]: DEBUG oslo_concurrency.lockutils [req-47a744ed-0d51-4e3a-b8b5-a75755279965 req-4c74228a-ef43-4566-91f3-5117a6d13baf service nova] Acquired lock "refresh_cache-36f6aab5-2774-402b-9db6-9912f2d5d473" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 847.299313] env[69328]: DEBUG nova.network.neutron [req-47a744ed-0d51-4e3a-b8b5-a75755279965 req-4c74228a-ef43-4566-91f3-5117a6d13baf service nova] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Refreshing network info cache for port 766304d2-5559-4007-9fa4-a01027d56e49 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 847.334850] env[69328]: DEBUG nova.compute.manager [None req-f1bbabe5-bbef-4ef3-b7b0-228e0c3f9062 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 847.335152] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f1bbabe5-bbef-4ef3-b7b0-228e0c3f9062 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 847.336106] env[69328]: DEBUG nova.scheduler.client.report [None req-03b5f61e-b980-4d3c-9522-27aa6b2948a0 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 847.342924] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b64a9bc-15a3-45aa-93f8-eff4cf50bd93 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.353446] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f1bbabe5-bbef-4ef3-b7b0-228e0c3f9062 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 847.354214] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1a2322b7-4923-419a-96f6-3ecef4e24c8e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.388998] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273400, 'name': CreateVM_Task, 'duration_secs': 
0.506896} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.389259] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 847.389737] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.389912] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 847.390280] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 847.390547] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-61991786-aeb6-4166-8453-59fa513ec25a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.398959] env[69328]: DEBUG oslo_vmware.api [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Waiting for the task: (returnval){ [ 847.398959] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b9516f-8205-138b-bbb3-2c427721e05f" [ 847.398959] env[69328]: _type = "Task" [ 847.398959] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.406149] env[69328]: DEBUG oslo_vmware.api [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273401, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.415666] env[69328]: DEBUG oslo_vmware.api [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': task-3273402, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.419084] env[69328]: DEBUG oslo_vmware.api [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b9516f-8205-138b-bbb3-2c427721e05f, 'name': SearchDatastore_Task, 'duration_secs': 0.012036} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.419435] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 847.419713] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 847.419904] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.420061] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 847.420244] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 847.421014] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d3f484ec-623d-4ac9-8604-88cbbdc6834e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.432609] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 847.432710] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 847.433622] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d01b66c1-786a-4346-bc25-1306f38e3f69 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.440442] env[69328]: DEBUG oslo_vmware.api [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Waiting for the task: (returnval){ [ 847.440442] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5264e0f0-3e58-03f5-9adf-804be87f0b21" [ 847.440442] env[69328]: _type = "Task" [ 847.440442] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.450191] env[69328]: DEBUG oslo_vmware.api [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5264e0f0-3e58-03f5-9adf-804be87f0b21, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.628891] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f1bbabe5-bbef-4ef3-b7b0-228e0c3f9062 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 847.629138] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f1bbabe5-bbef-4ef3-b7b0-228e0c3f9062 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 847.629342] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1bbabe5-bbef-4ef3-b7b0-228e0c3f9062 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Deleting the datastore file [datastore2] 62fa6807-f67d-4bf5-ba23-9e97f9da120e {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 847.629619] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-da4e673e-1261-4384-945b-669b7131e5a7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.637057] env[69328]: DEBUG oslo_vmware.api [None req-f1bbabe5-bbef-4ef3-b7b0-228e0c3f9062 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 847.637057] env[69328]: value = "task-3273404" [ 847.637057] env[69328]: _type = "Task" [ 847.637057] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.645961] env[69328]: DEBUG oslo_vmware.api [None req-f1bbabe5-bbef-4ef3-b7b0-228e0c3f9062 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273404, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.650313] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "d017d08e-5f9e-4d05-8914-3320d4c87c9b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 847.650556] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "d017d08e-5f9e-4d05-8914-3320d4c87c9b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 847.690593] env[69328]: DEBUG oslo_vmware.api [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273397, 'name': RemoveSnapshot_Task, 'duration_secs': 0.852298} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.690962] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Deleted Snapshot of the VM instance {{(pid=69328) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 847.691229] env[69328]: INFO nova.compute.manager [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Took 16.40 seconds to snapshot the instance on the hypervisor. 
[ 847.847287] env[69328]: DEBUG oslo_concurrency.lockutils [None req-03b5f61e-b980-4d3c-9522-27aa6b2948a0 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.159s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 847.850554] env[69328]: DEBUG oslo_concurrency.lockutils [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 847.853110] env[69328]: INFO nova.compute.claims [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 847.876027] env[69328]: INFO nova.scheduler.client.report [None req-03b5f61e-b980-4d3c-9522-27aa6b2948a0 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Deleted allocations for instance d10bee67-6294-4537-9ce7-4eedb8361ddc [ 847.911282] env[69328]: DEBUG oslo_vmware.api [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273401, 'name': PowerOnVM_Task, 'duration_secs': 0.659143} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.915751] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 847.916888] env[69328]: INFO nova.compute.manager [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Took 8.27 seconds to spawn the instance on the hypervisor. [ 847.916888] env[69328]: DEBUG nova.compute.manager [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 847.918432] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-808ae4a5-948a-4f45-a2da-7ac389f15454 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.929256] env[69328]: DEBUG oslo_vmware.api [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': task-3273402, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.521115} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.930227] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 4a990411-16cd-4e53-9068-29654b69abe6/4a990411-16cd-4e53-9068-29654b69abe6.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 847.930452] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 847.935337] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1a6b1a4e-fddd-4693-aa6a-c17654bfa83f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.946974] env[69328]: DEBUG oslo_vmware.api [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Waiting for the task: (returnval){ [ 847.946974] env[69328]: value = "task-3273405" [ 847.946974] env[69328]: _type = "Task" [ 847.946974] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.951547] env[69328]: DEBUG oslo_vmware.api [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5264e0f0-3e58-03f5-9adf-804be87f0b21, 'name': SearchDatastore_Task, 'duration_secs': 0.01707} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.956694] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f95fd06-f89e-48da-a8fe-dc7f4fba292a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.969052] env[69328]: DEBUG oslo_vmware.api [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': task-3273405, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.970659] env[69328]: DEBUG oslo_vmware.api [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Waiting for the task: (returnval){ [ 847.970659] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52942b87-e338-ad01-2e39-50ce0f71d0d0" [ 847.970659] env[69328]: _type = "Task" [ 847.970659] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.983044] env[69328]: DEBUG oslo_vmware.api [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52942b87-e338-ad01-2e39-50ce0f71d0d0, 'name': SearchDatastore_Task, 'duration_secs': 0.015639} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.983300] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 847.983557] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] e5a2de79-cfbc-4d9c-8b58-5aa819657978/e5a2de79-cfbc-4d9c-8b58-5aa819657978.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 847.983849] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f7027772-bd2d-41f7-ad48-881d3115fe8c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.992226] env[69328]: DEBUG oslo_vmware.api [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Waiting for the task: (returnval){ [ 847.992226] env[69328]: value = "task-3273406" [ 847.992226] env[69328]: _type = "Task" [ 847.992226] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.004657] env[69328]: DEBUG oslo_vmware.api [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': task-3273406, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.119376] env[69328]: DEBUG nova.network.neutron [req-47a744ed-0d51-4e3a-b8b5-a75755279965 req-4c74228a-ef43-4566-91f3-5117a6d13baf service nova] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Updated VIF entry in instance network info cache for port 766304d2-5559-4007-9fa4-a01027d56e49. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 848.119376] env[69328]: DEBUG nova.network.neutron [req-47a744ed-0d51-4e3a-b8b5-a75755279965 req-4c74228a-ef43-4566-91f3-5117a6d13baf service nova] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Updating instance_info_cache with network_info: [{"id": "766304d2-5559-4007-9fa4-a01027d56e49", "address": "fa:16:3e:7b:c7:c8", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap766304d2-55", "ovs_interfaceid": "766304d2-5559-4007-9fa4-a01027d56e49", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.150932] env[69328]: DEBUG oslo_vmware.api [None req-f1bbabe5-bbef-4ef3-b7b0-228e0c3f9062 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273404, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.218439} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.151245] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1bbabe5-bbef-4ef3-b7b0-228e0c3f9062 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 848.151452] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f1bbabe5-bbef-4ef3-b7b0-228e0c3f9062 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 848.151819] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f1bbabe5-bbef-4ef3-b7b0-228e0c3f9062 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 848.152046] env[69328]: INFO nova.compute.manager [None req-f1bbabe5-bbef-4ef3-b7b0-228e0c3f9062 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Took 0.82 seconds to destroy the instance on the hypervisor. 
[ 848.152309] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f1bbabe5-bbef-4ef3-b7b0-228e0c3f9062 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 848.152547] env[69328]: DEBUG nova.compute.manager [-] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 848.152652] env[69328]: DEBUG nova.network.neutron [-] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 848.154801] env[69328]: DEBUG nova.compute.manager [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 848.199549] env[69328]: DEBUG nova.compute.manager [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Instance disappeared during snapshot {{(pid=69328) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 848.216539] env[69328]: DEBUG nova.compute.manager [None req-2c577ff4-880b-4dc7-a9c8-2dd1a981a125 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Image not found during clean up f10da8bc-00a0-4667-ab61-f221ad2fbf2e {{(pid=69328) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4601}} [ 848.390106] env[69328]: DEBUG oslo_concurrency.lockutils [None req-03b5f61e-b980-4d3c-9522-27aa6b2948a0 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "d10bee67-6294-4537-9ce7-4eedb8361ddc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.868s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 848.445587] env[69328]: INFO nova.compute.manager [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Took 43.77 seconds to build instance. [ 848.462329] env[69328]: DEBUG oslo_vmware.api [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': task-3273405, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078156} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.463712] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 848.464501] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43298f33-2cd6-4aba-b8a2-f3b60843c9bd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.491160] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Reconfiguring VM instance instance-0000003a to attach disk [datastore2] 4a990411-16cd-4e53-9068-29654b69abe6/4a990411-16cd-4e53-9068-29654b69abe6.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 848.492184] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2fc7d077-f5a0-48f1-b75a-9f982afcabdd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.520128] env[69328]: DEBUG oslo_vmware.api [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': task-3273406, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.520128] env[69328]: DEBUG oslo_vmware.api [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Waiting for the task: (returnval){ [ 848.520128] env[69328]: value = "task-3273407" [ 848.520128] env[69328]: _type = "Task" [ 848.520128] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.529288] env[69328]: DEBUG oslo_vmware.api [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': task-3273407, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.624042] env[69328]: DEBUG oslo_concurrency.lockutils [req-47a744ed-0d51-4e3a-b8b5-a75755279965 req-4c74228a-ef43-4566-91f3-5117a6d13baf service nova] Releasing lock "refresh_cache-36f6aab5-2774-402b-9db6-9912f2d5d473" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 848.688474] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 848.946092] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f99c37a7-2213-4827-a71b-ab670ee27875 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "6ccd0715-0903-4fed-bf80-240f386e4ad8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 51.276s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 848.971630] env[69328]: DEBUG nova.network.neutron [-] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.020259] env[69328]: DEBUG oslo_vmware.api [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': task-3273406, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.861879} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.020259] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] e5a2de79-cfbc-4d9c-8b58-5aa819657978/e5a2de79-cfbc-4d9c-8b58-5aa819657978.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 849.020259] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 849.020259] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-038fe62f-954e-43d0-9ce1-6d704e719480 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.031365] env[69328]: DEBUG oslo_vmware.api [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Waiting for the task: (returnval){ [ 849.031365] env[69328]: value = "task-3273408" [ 849.031365] env[69328]: _type = "Task" [ 849.031365] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.034645] env[69328]: DEBUG oslo_vmware.api [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': task-3273407, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.045870] env[69328]: DEBUG oslo_vmware.api [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': task-3273408, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.260858] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da7ee632-ca77-45ad-b3c6-f84cefd44506 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.275168] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99a9e4c8-6b95-4843-916a-b069aacbf290 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.309863] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10f900f3-00ff-48db-8e07-dab581b16a16 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.321818] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-326b8c85-b38d-4bea-a77c-799ac74584ca {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.347843] env[69328]: DEBUG nova.compute.provider_tree [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 849.475088] env[69328]: INFO nova.compute.manager [-] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Took 1.32 seconds to deallocate network for instance. [ 849.498443] env[69328]: DEBUG nova.compute.manager [req-610fff17-98a6-4774-85ac-ffda4f7acb91 req-f2250c9e-be35-48ac-8606-81e48ae4e515 service nova] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Received event network-vif-deleted-ad43b1f6-e3ce-4362-856f-82909e1eb51c {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 849.536186] env[69328]: DEBUG oslo_vmware.api [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': task-3273407, 'name': ReconfigVM_Task, 'duration_secs': 0.552768} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.541662] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Reconfigured VM instance instance-0000003a to attach disk [datastore2] 4a990411-16cd-4e53-9068-29654b69abe6/4a990411-16cd-4e53-9068-29654b69abe6.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 849.542563] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e4291f74-cf56-47cf-b358-a71f8b9e1da0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.550461] env[69328]: DEBUG oslo_vmware.api [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': task-3273408, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.096545} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.552030] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 849.552438] env[69328]: DEBUG oslo_vmware.api [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Waiting for the task: (returnval){ [ 849.552438] env[69328]: value = "task-3273409" [ 849.552438] env[69328]: _type = "Task" [ 849.552438] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.553269] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a3b5ea9-6be3-47c5-8597-535474cc7ae7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.579153] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Reconfiguring VM instance instance-0000003d to attach disk [datastore2] e5a2de79-cfbc-4d9c-8b58-5aa819657978/e5a2de79-cfbc-4d9c-8b58-5aa819657978.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 849.582870] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4ab21335-8089-4532-8983-fe2659bc405c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.597442] env[69328]: DEBUG oslo_vmware.api [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': task-3273409, 'name': Rename_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.603891] env[69328]: DEBUG oslo_vmware.api [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Waiting for the task: (returnval){ [ 849.603891] env[69328]: value = "task-3273410" [ 849.603891] env[69328]: _type = "Task" [ 849.603891] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.612974] env[69328]: DEBUG oslo_vmware.api [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': task-3273410, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.851698] env[69328]: DEBUG nova.scheduler.client.report [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 849.983322] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1bbabe5-bbef-4ef3-b7b0-228e0c3f9062 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 850.069990] env[69328]: DEBUG oslo_vmware.api [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': task-3273409, 'name': Rename_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.114786] env[69328]: DEBUG oslo_vmware.api [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': task-3273410, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.187617] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "6ccd0715-0903-4fed-bf80-240f386e4ad8" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 850.188219] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "6ccd0715-0903-4fed-bf80-240f386e4ad8" acquired by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 850.188482] env[69328]: INFO nova.compute.manager [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Shelving [ 850.357664] env[69328]: DEBUG oslo_concurrency.lockutils [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.507s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 850.358125] env[69328]: DEBUG nova.compute.manager [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 850.360923] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 33.339s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 850.361115] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 850.361266] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69328) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 850.361598] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a1d308d-b267-4d92-84b1-af0eebcb5224 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.860s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 850.361798] env[69328]: DEBUG nova.objects.instance [None req-4a1d308d-b267-4d92-84b1-af0eebcb5224 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Lazy-loading 'resources' on Instance uuid 07b1f872-02bc-471f-97d6-3a781075bee5 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 850.364224] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa8acddf-697f-4539-82c4-20c91b6743f3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.376899] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dfdbdda-2c51-4ca0-a803-770a6b0e57a6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.397324] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0cb74fb-343a-412b-950b-e67e16be0835 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.406064] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44ffe9cd-2563-40bb-b08a-5e744b3913a1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.441135] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179064MB free_disk=116GB free_vcpus=48 pci_devices=None {{(pid=69328) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 850.441296] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 850.568036] env[69328]: DEBUG oslo_vmware.api [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': task-3273409, 'name': Rename_Task, 'duration_secs': 0.69962} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.568036] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 850.568347] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0baf4173-fe3d-4fd7-9ecf-d7d5b74f13cb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.575737] env[69328]: DEBUG oslo_vmware.api [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Waiting for the task: (returnval){ [ 850.575737] env[69328]: value = "task-3273411" [ 850.575737] env[69328]: _type = "Task" [ 850.575737] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.583856] env[69328]: DEBUG oslo_vmware.api [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': task-3273411, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.614546] env[69328]: DEBUG oslo_vmware.api [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': task-3273410, 'name': ReconfigVM_Task, 'duration_secs': 0.736242} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.614828] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Reconfigured VM instance instance-0000003d to attach disk [datastore2] e5a2de79-cfbc-4d9c-8b58-5aa819657978/e5a2de79-cfbc-4d9c-8b58-5aa819657978.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 850.615490] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e0b8d77d-42df-4e37-855b-66998865bdf6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.622526] env[69328]: DEBUG oslo_vmware.api [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Waiting for the task: (returnval){ [ 850.622526] env[69328]: value = "task-3273412" [ 850.622526] env[69328]: _type = "Task" [ 850.622526] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.632063] env[69328]: DEBUG oslo_vmware.api [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': task-3273412, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.650459] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "f1be93b2-08db-41fe-87c4-f4e5f964cfa4" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 850.650605] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "f1be93b2-08db-41fe-87c4-f4e5f964cfa4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 850.865227] env[69328]: DEBUG nova.compute.utils [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 850.870719] env[69328]: DEBUG nova.compute.manager [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 850.870917] env[69328]: DEBUG nova.network.neutron [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 850.927079] env[69328]: DEBUG nova.policy [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1685bb9a09d84a7a92306c64f0e5895e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '75d5853e3c724d02bacfa75173e38ab3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 851.086957] env[69328]: DEBUG oslo_vmware.api [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': task-3273411, 'name': PowerOnVM_Task, 'duration_secs': 0.4373} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.087070] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 851.087289] env[69328]: DEBUG nova.compute.manager [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 851.090366] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-193893f3-5702-414a-a107-33b84a04ca78 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.133330] env[69328]: DEBUG oslo_vmware.api [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': task-3273412, 'name': Rename_Task, 'duration_secs': 0.225802} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.136032] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 851.137707] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-274b14ad-0398-4eee-9cf5-8fc6aaa374f2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.147231] env[69328]: DEBUG oslo_vmware.api [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Waiting for the task: (returnval){ [ 851.147231] env[69328]: value = "task-3273413" [ 851.147231] env[69328]: _type = "Task" [ 851.147231] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.155011] env[69328]: DEBUG nova.compute.manager [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 851.162597] env[69328]: DEBUG oslo_vmware.api [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': task-3273413, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.200021] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 851.200446] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-40f29ced-8d49-4ff9-bb6a-a5d662d04e69 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.210175] env[69328]: DEBUG oslo_vmware.api [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 851.210175] env[69328]: value = "task-3273414" [ 851.210175] env[69328]: _type = "Task" [ 851.210175] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.219284] env[69328]: DEBUG oslo_vmware.api [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273414, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.277248] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-293981ea-f4d8-4c9d-ac60-187d202447f0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.287136] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd484a6f-e414-48ea-bf96-ac29c74dcc2d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.320858] env[69328]: DEBUG nova.network.neutron [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Successfully created port: eebd5d04-278d-4e22-9e5d-df5ae37877cf {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 851.324194] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-434f81f9-cb45-4386-93b0-1165e8426765 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.332903] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56f90cec-685d-49b7-a70b-0b8a26db1650 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.348146] env[69328]: DEBUG nova.compute.provider_tree [None req-4a1d308d-b267-4d92-84b1-af0eebcb5224 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 851.371362] env[69328]: DEBUG nova.compute.manager [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 851.608258] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 851.660274] env[69328]: DEBUG oslo_vmware.api [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': task-3273413, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.675718] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 851.720743] env[69328]: DEBUG oslo_vmware.api [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273414, 'name': PowerOffVM_Task, 'duration_secs': 0.206655} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.721011] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 851.721821] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b01a2219-fe5c-4549-b2a5-be98df03326c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.739993] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-380c1c34-07f2-4bbe-8339-7ad49971962c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.852196] env[69328]: DEBUG nova.scheduler.client.report [None req-4a1d308d-b267-4d92-84b1-af0eebcb5224 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 852.158236] env[69328]: DEBUG oslo_vmware.api [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': task-3273413, 'name': PowerOnVM_Task, 'duration_secs': 0.80858} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.158513] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 852.158716] env[69328]: INFO nova.compute.manager [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Took 5.46 seconds to spawn the instance on the hypervisor. [ 852.158970] env[69328]: DEBUG nova.compute.manager [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 852.159788] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ac04619-c677-4e41-afc9-508a9fb21341 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.251215] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Creating Snapshot of the VM instance {{(pid=69328) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 852.251738] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-83469c57-40ff-403b-8d6a-d10ce89435dd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.260748] env[69328]: DEBUG oslo_vmware.api [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 852.260748] env[69328]: value = "task-3273415" [ 852.260748] env[69328]: _type = "Task" [ 852.260748] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.269721] env[69328]: DEBUG oslo_vmware.api [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273415, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.357188] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a1d308d-b267-4d92-84b1-af0eebcb5224 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.995s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 852.359645] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.707s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 852.361755] env[69328]: INFO nova.compute.claims [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 852.375580] env[69328]: INFO nova.scheduler.client.report [None req-4a1d308d-b267-4d92-84b1-af0eebcb5224 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Deleted allocations for instance 07b1f872-02bc-471f-97d6-3a781075bee5 [ 852.380056] env[69328]: DEBUG nova.compute.manager [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 852.408272] env[69328]: DEBUG nova.virt.hardware [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=<?>,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-04-03T17:33:40Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 852.408542] env[69328]: DEBUG nova.virt.hardware [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 852.408733] env[69328]: DEBUG nova.virt.hardware [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 852.408876] env[69328]: DEBUG nova.virt.hardware [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 852.409250] env[69328]: DEBUG nova.virt.hardware [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 852.409451] env[69328]: DEBUG nova.virt.hardware [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 852.409666] env[69328]: DEBUG nova.virt.hardware [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 852.409825] env[69328]: DEBUG nova.virt.hardware [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 852.409990] env[69328]: DEBUG 
nova.virt.hardware [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 852.410174] env[69328]: DEBUG nova.virt.hardware [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 852.410346] env[69328]: DEBUG nova.virt.hardware [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 852.411622] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63b47da5-bb2a-423e-b8ba-557ff62aa3e0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.422502] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-368dc347-9a32-42b0-91c2-cc774eb17dcf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.525246] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e810f54a-ff1a-4673-a691-033e65f1d8b1 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Acquiring lock "4a990411-16cd-4e53-9068-29654b69abe6" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 852.525528] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e810f54a-ff1a-4673-a691-033e65f1d8b1 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Lock "4a990411-16cd-4e53-9068-29654b69abe6" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 852.525744] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e810f54a-ff1a-4673-a691-033e65f1d8b1 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Acquiring lock "4a990411-16cd-4e53-9068-29654b69abe6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 852.525932] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e810f54a-ff1a-4673-a691-033e65f1d8b1 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Lock "4a990411-16cd-4e53-9068-29654b69abe6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 852.526118] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e810f54a-ff1a-4673-a691-033e65f1d8b1 tempest-ServersListShow296Test-1109314957 
tempest-ServersListShow296Test-1109314957-project-member] Lock "4a990411-16cd-4e53-9068-29654b69abe6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 852.528195] env[69328]: INFO nova.compute.manager [None req-e810f54a-ff1a-4673-a691-033e65f1d8b1 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Terminating instance [ 852.677324] env[69328]: INFO nova.compute.manager [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Took 44.16 seconds to build instance. [ 852.771538] env[69328]: DEBUG oslo_vmware.api [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273415, 'name': CreateSnapshot_Task, 'duration_secs': 0.401756} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.771799] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Created Snapshot of the VM instance {{(pid=69328) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 852.772562] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61d1ec06-3ef6-4e88-ae6c-38fdc34ced87 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.817237] env[69328]: DEBUG nova.compute.manager [req-cf24ed26-5050-4656-9da8-1c026f3f2706 req-3600c1de-03f1-449f-b3d2-d871270849c3 service nova] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Received event network-vif-plugged-eebd5d04-278d-4e22-9e5d-df5ae37877cf {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 852.817491] env[69328]: DEBUG oslo_concurrency.lockutils [req-cf24ed26-5050-4656-9da8-1c026f3f2706 req-3600c1de-03f1-449f-b3d2-d871270849c3 service nova] Acquiring lock "fd72bae3-cb72-48d0-a0df-9ea3a770a86c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 852.817613] env[69328]: DEBUG oslo_concurrency.lockutils [req-cf24ed26-5050-4656-9da8-1c026f3f2706 req-3600c1de-03f1-449f-b3d2-d871270849c3 service nova] Lock "fd72bae3-cb72-48d0-a0df-9ea3a770a86c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 852.817725] env[69328]: DEBUG oslo_concurrency.lockutils [req-cf24ed26-5050-4656-9da8-1c026f3f2706 req-3600c1de-03f1-449f-b3d2-d871270849c3 service nova] Lock "fd72bae3-cb72-48d0-a0df-9ea3a770a86c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 852.817910] env[69328]: DEBUG nova.compute.manager 
[req-cf24ed26-5050-4656-9da8-1c026f3f2706 req-3600c1de-03f1-449f-b3d2-d871270849c3 service nova] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] No waiting events found dispatching network-vif-plugged-eebd5d04-278d-4e22-9e5d-df5ae37877cf {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 852.818111] env[69328]: WARNING nova.compute.manager [req-cf24ed26-5050-4656-9da8-1c026f3f2706 req-3600c1de-03f1-449f-b3d2-d871270849c3 service nova] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Received unexpected event network-vif-plugged-eebd5d04-278d-4e22-9e5d-df5ae37877cf for instance with vm_state building and task_state spawning. [ 852.886166] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a1d308d-b267-4d92-84b1-af0eebcb5224 tempest-ServerMetadataNegativeTestJSON-1628468665 tempest-ServerMetadataNegativeTestJSON-1628468665-project-member] Lock "07b1f872-02bc-471f-97d6-3a781075bee5" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 37.024s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 852.923169] env[69328]: DEBUG nova.network.neutron [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Successfully updated port: eebd5d04-278d-4e22-9e5d-df5ae37877cf {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 853.032029] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e810f54a-ff1a-4673-a691-033e65f1d8b1 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Acquiring lock "refresh_cache-4a990411-16cd-4e53-9068-29654b69abe6" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.032029] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e810f54a-ff1a-4673-a691-033e65f1d8b1 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Acquired lock "refresh_cache-4a990411-16cd-4e53-9068-29654b69abe6" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 853.032029] env[69328]: DEBUG nova.network.neutron [None req-e810f54a-ff1a-4673-a691-033e65f1d8b1 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 853.180584] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4a08c480-0654-4685-928e-663dd88ecebc tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Lock "e5a2de79-cfbc-4d9c-8b58-5aa819657978" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 53.224s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 853.291134] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Creating linked-clone VM from snapshot {{(pid=69328) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 853.291134] 
env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-f826c0c0-6005-4dca-85c2-a7bd83cfb42f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.302035] env[69328]: DEBUG oslo_vmware.api [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 853.302035] env[69328]: value = "task-3273416" [ 853.302035] env[69328]: _type = "Task" [ 853.302035] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.312261] env[69328]: DEBUG oslo_vmware.api [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273416, 'name': CloneVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.425664] env[69328]: DEBUG oslo_concurrency.lockutils [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "refresh_cache-fd72bae3-cb72-48d0-a0df-9ea3a770a86c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.425987] env[69328]: DEBUG oslo_concurrency.lockutils [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquired lock "refresh_cache-fd72bae3-cb72-48d0-a0df-9ea3a770a86c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 853.425987] env[69328]: DEBUG nova.network.neutron [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 853.569611] env[69328]: DEBUG nova.network.neutron [None req-e810f54a-ff1a-4673-a691-033e65f1d8b1 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 853.666794] env[69328]: DEBUG nova.network.neutron [None req-e810f54a-ff1a-4673-a691-033e65f1d8b1 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.799805] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92f08b48-86ee-42c5-b12f-efdcdfc1c057 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.813839] env[69328]: DEBUG oslo_vmware.api [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273416, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.815063] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ef3a91e-9b10-45bf-bb76-9175c9ddc3a9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.851822] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed8a6e52-3d8f-4cdb-adb7-e998b5ece060 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.859487] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f6e741c-e8a6-4557-9569-536819da1ea7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.878017] env[69328]: DEBUG nova.compute.provider_tree [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 853.939795] env[69328]: INFO nova.compute.manager [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Rebuilding instance [ 853.984698] env[69328]: DEBUG nova.network.neutron [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 853.988783] env[69328]: DEBUG nova.compute.manager [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 853.989673] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe0d0d1f-5bf7-4dff-a80b-38ccfed598cc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.172545] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e810f54a-ff1a-4673-a691-033e65f1d8b1 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Releasing lock "refresh_cache-4a990411-16cd-4e53-9068-29654b69abe6" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 854.173107] env[69328]: DEBUG nova.compute.manager [None req-e810f54a-ff1a-4673-a691-033e65f1d8b1 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 854.173271] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e810f54a-ff1a-4673-a691-033e65f1d8b1 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 854.174228] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c175adf7-c869-43f5-a441-5c1579bf0b5d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.184033] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e810f54a-ff1a-4673-a691-033e65f1d8b1 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 854.184304] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f1246a45-b4fe-4775-8e9a-02f7b1f7195a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.192456] env[69328]: DEBUG oslo_vmware.api [None req-e810f54a-ff1a-4673-a691-033e65f1d8b1 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Waiting for the task: (returnval){ [ 854.192456] env[69328]: value = "task-3273417" [ 854.192456] env[69328]: _type = "Task" [ 854.192456] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.202348] env[69328]: DEBUG oslo_vmware.api [None req-e810f54a-ff1a-4673-a691-033e65f1d8b1 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': task-3273417, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.218109] env[69328]: DEBUG nova.network.neutron [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Updating instance_info_cache with network_info: [{"id": "eebd5d04-278d-4e22-9e5d-df5ae37877cf", "address": "fa:16:3e:d8:d9:39", "network": {"id": "e2ab6957-2c9d-4b95-91bd-5a62d9a284ba", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-967470666-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75d5853e3c724d02bacfa75173e38ab3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeebd5d04-27", "ovs_interfaceid": "eebd5d04-278d-4e22-9e5d-df5ae37877cf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.313705] env[69328]: DEBUG oslo_vmware.api [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273416, 'name': CloneVM_Task} progress is 95%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.381107] env[69328]: DEBUG nova.scheduler.client.report [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 854.702857] env[69328]: DEBUG oslo_vmware.api [None req-e810f54a-ff1a-4673-a691-033e65f1d8b1 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': task-3273417, 'name': PowerOffVM_Task, 'duration_secs': 0.214381} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.703335] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e810f54a-ff1a-4673-a691-033e65f1d8b1 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 854.703560] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e810f54a-ff1a-4673-a691-033e65f1d8b1 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 854.703832] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3ba90716-f842-4cd7-b341-97b3e372a70e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.722819] env[69328]: DEBUG oslo_concurrency.lockutils [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Releasing lock "refresh_cache-fd72bae3-cb72-48d0-a0df-9ea3a770a86c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 854.722819] env[69328]: DEBUG nova.compute.manager [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Instance network_info: |[{"id": "eebd5d04-278d-4e22-9e5d-df5ae37877cf", "address": "fa:16:3e:d8:d9:39", "network": {"id": "e2ab6957-2c9d-4b95-91bd-5a62d9a284ba", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-967470666-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75d5853e3c724d02bacfa75173e38ab3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeebd5d04-27", "ovs_interfaceid": "eebd5d04-278d-4e22-9e5d-df5ae37877cf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 854.722819] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d8:d9:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'abcf0d10-3f3f-45dc-923e-1c78766e2dad', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eebd5d04-278d-4e22-9e5d-df5ae37877cf', 
'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 854.731564] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 854.731822] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 854.732830] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4d517131-93e3-40fe-96c5-6d82eebfa3f5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.748900] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e810f54a-ff1a-4673-a691-033e65f1d8b1 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 854.749137] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e810f54a-ff1a-4673-a691-033e65f1d8b1 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 854.749320] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-e810f54a-ff1a-4673-a691-033e65f1d8b1 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Deleting the datastore file [datastore2] 4a990411-16cd-4e53-9068-29654b69abe6 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 854.749978] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5e1b18f7-2835-494a-960e-01d84862c86c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.755339] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 854.755339] env[69328]: value = "task-3273419" [ 854.755339] env[69328]: _type = "Task" [ 854.755339] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.756754] env[69328]: DEBUG oslo_vmware.api [None req-e810f54a-ff1a-4673-a691-033e65f1d8b1 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Waiting for the task: (returnval){ [ 854.756754] env[69328]: value = "task-3273420" [ 854.756754] env[69328]: _type = "Task" [ 854.756754] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.767837] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273419, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.771100] env[69328]: DEBUG oslo_vmware.api [None req-e810f54a-ff1a-4673-a691-033e65f1d8b1 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': task-3273420, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.814692] env[69328]: DEBUG oslo_vmware.api [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273416, 'name': CloneVM_Task, 'duration_secs': 1.33953} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.814692] env[69328]: INFO nova.virt.vmwareapi.vmops [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Created linked-clone VM from snapshot [ 854.815468] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7af2d4d8-7a14-4314-8a3b-31408d60b869 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.823475] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Uploading image bbfa0ebf-9220-4057-9a85-eb18d82d6eae {{(pid=69328) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 854.849551] env[69328]: DEBUG nova.compute.manager [req-cd65c8a6-c842-45bc-82a0-c7983e963eef req-418642f3-a71a-49d4-8585-3320db34e000 service nova] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Received event network-changed-eebd5d04-278d-4e22-9e5d-df5ae37877cf {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 854.849551] env[69328]: DEBUG nova.compute.manager [req-cd65c8a6-c842-45bc-82a0-c7983e963eef req-418642f3-a71a-49d4-8585-3320db34e000 service nova] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Refreshing instance network info cache due to event network-changed-eebd5d04-278d-4e22-9e5d-df5ae37877cf. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 854.849551] env[69328]: DEBUG oslo_concurrency.lockutils [req-cd65c8a6-c842-45bc-82a0-c7983e963eef req-418642f3-a71a-49d4-8585-3320db34e000 service nova] Acquiring lock "refresh_cache-fd72bae3-cb72-48d0-a0df-9ea3a770a86c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.849551] env[69328]: DEBUG oslo_concurrency.lockutils [req-cd65c8a6-c842-45bc-82a0-c7983e963eef req-418642f3-a71a-49d4-8585-3320db34e000 service nova] Acquired lock "refresh_cache-fd72bae3-cb72-48d0-a0df-9ea3a770a86c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 854.849551] env[69328]: DEBUG nova.network.neutron [req-cd65c8a6-c842-45bc-82a0-c7983e963eef req-418642f3-a71a-49d4-8585-3320db34e000 service nova] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Refreshing network info cache for port eebd5d04-278d-4e22-9e5d-df5ae37877cf {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 854.860768] env[69328]: DEBUG oslo_vmware.rw_handles [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 854.860768] env[69328]: value = "vm-653830" [ 854.860768] env[69328]: _type = "VirtualMachine" [ 854.860768] env[69328]: }. {{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 854.861502] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-cf903b86-8ce0-4490-9814-6a19a825b89c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.869827] env[69328]: DEBUG oslo_vmware.rw_handles [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lease: (returnval){ [ 854.869827] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e58eff-6559-a879-18da-a19ff2012021" [ 854.869827] env[69328]: _type = "HttpNfcLease" [ 854.869827] env[69328]: } obtained for exporting VM: (result){ [ 854.869827] env[69328]: value = "vm-653830" [ 854.869827] env[69328]: _type = "VirtualMachine" [ 854.869827] env[69328]: }. {{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 854.870077] env[69328]: DEBUG oslo_vmware.api [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the lease: (returnval){ [ 854.870077] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e58eff-6559-a879-18da-a19ff2012021" [ 854.870077] env[69328]: _type = "HttpNfcLease" [ 854.870077] env[69328]: } to be ready. {{(pid=69328) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 854.881513] env[69328]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 854.881513] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e58eff-6559-a879-18da-a19ff2012021" [ 854.881513] env[69328]: _type = "HttpNfcLease" [ 854.881513] env[69328]: } is initializing. 
{{(pid=69328) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 854.887855] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.528s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 854.888464] env[69328]: DEBUG nova.compute.manager [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 854.891672] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.505s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 854.893267] env[69328]: INFO nova.compute.claims [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 855.006181] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 855.006428] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3d0b1e1a-f463-4408-86de-1558eadc2818 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.015187] env[69328]: DEBUG oslo_vmware.api [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Waiting for the task: (returnval){ [ 855.015187] env[69328]: value = "task-3273422" [ 855.015187] env[69328]: _type = "Task" [ 855.015187] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.025152] env[69328]: DEBUG oslo_vmware.api [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': task-3273422, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.272998] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273419, 'name': CreateVM_Task, 'duration_secs': 0.385978} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.276471] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 855.276910] env[69328]: DEBUG oslo_vmware.api [None req-e810f54a-ff1a-4673-a691-033e65f1d8b1 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Task: {'id': task-3273420, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.187485} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.277889] env[69328]: DEBUG oslo_concurrency.lockutils [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.277889] env[69328]: DEBUG oslo_concurrency.lockutils [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 855.279016] env[69328]: DEBUG oslo_concurrency.lockutils [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 855.279016] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-e810f54a-ff1a-4673-a691-033e65f1d8b1 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 855.279016] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e810f54a-ff1a-4673-a691-033e65f1d8b1 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 855.279016] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e810f54a-ff1a-4673-a691-033e65f1d8b1 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 855.279358] env[69328]: INFO nova.compute.manager [None req-e810f54a-ff1a-4673-a691-033e65f1d8b1 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Took 1.11 seconds to destroy the instance on the hypervisor. 
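The DeleteDatastoreFile_Task records above (task-3273420: task submitted, "Waiting for the task", progress polling at oslo_vmware/api.py:434, completion at api.py:444) follow the generic oslo.vmware session pattern of starting an asynchronous vCenter task and letting the session poll it. Below is a minimal sketch of that pattern, not Nova's actual ds_util/vm_util call path; the vCenter host, credentials, datastore path, and datacenter reference are hypothetical placeholders.

# Sketch of the invoke-then-wait pattern seen in the surrounding records.
# All endpoint details below are placeholders, not values from this log.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    'vc.example.test', 'svc-user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

file_manager = session.vim.service_content.fileManager
datacenter_ref = None  # placeholder: a real Datacenter managed-object reference

# Start the asynchronous DeleteDatastoreFile_Task, then block while the
# session polls the task object until it reports success or raises.
task = session.invoke_api(
    session.vim, 'DeleteDatastoreFile_Task', file_manager,
    name='[datastore2] 4a990411-16cd-4e53-9068-29654b69abe6',
    datacenter=datacenter_ref)
session.wait_for_task(task)

The same invoke-then-wait_for_task loop is what produces the CloneVM_Task, PowerOffVM_Task, CreateVM_Task, and CopyVirtualDisk_Task progress lines elsewhere in this log.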
[ 855.279358] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e810f54a-ff1a-4673-a691-033e65f1d8b1 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 855.279526] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-346d14fc-ccef-4dec-abe9-964d960dc09d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.281122] env[69328]: DEBUG nova.compute.manager [-] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 855.281216] env[69328]: DEBUG nova.network.neutron [-] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 855.286597] env[69328]: DEBUG oslo_vmware.api [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 855.286597] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52599386-d4da-0567-0f69-370130fcc88c" [ 855.286597] env[69328]: _type = "Task" [ 855.286597] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.294845] env[69328]: DEBUG oslo_vmware.api [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52599386-d4da-0567-0f69-370130fcc88c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.299901] env[69328]: DEBUG nova.network.neutron [-] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 855.379266] env[69328]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 855.379266] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e58eff-6559-a879-18da-a19ff2012021" [ 855.379266] env[69328]: _type = "HttpNfcLease" [ 855.379266] env[69328]: } is ready. {{(pid=69328) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 855.379493] env[69328]: DEBUG oslo_vmware.rw_handles [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 855.379493] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e58eff-6559-a879-18da-a19ff2012021" [ 855.379493] env[69328]: _type = "HttpNfcLease" [ 855.379493] env[69328]: }. 
{{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 855.380088] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cdbaea0-a0a5-47e4-b074-4276488b3691 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.388605] env[69328]: DEBUG oslo_vmware.rw_handles [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52afc428-3bf1-d962-d483-40ef1c558110/disk-0.vmdk from lease info. {{(pid=69328) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 855.388784] env[69328]: DEBUG oslo_vmware.rw_handles [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52afc428-3bf1-d962-d483-40ef1c558110/disk-0.vmdk for reading. {{(pid=69328) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 855.447045] env[69328]: DEBUG nova.compute.utils [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 855.454029] env[69328]: DEBUG nova.compute.manager [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 855.454238] env[69328]: DEBUG nova.network.neutron [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 855.488900] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-ed706a5b-81f0-4030-b899-bcaff2fe39ff {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.521186] env[69328]: DEBUG nova.policy [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '054c88257997491dbbc899b4bf030da6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1ff1b8afdf994141b20b41c0a4088101', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 855.532921] env[69328]: DEBUG oslo_vmware.api [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': task-3273422, 'name': PowerOffVM_Task, 'duration_secs': 0.207752} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.533226] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 855.533898] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 855.534842] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acb240db-35b6-4439-8541-41114e0d8fe2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.542327] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 855.542587] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6567ef6b-3fc4-47fd-a4e2-ff14ed88df95 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.576591] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd 
tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 855.576805] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 855.576983] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Deleting the datastore file [datastore2] e5a2de79-cfbc-4d9c-8b58-5aa819657978 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 855.577265] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ec5beebb-72fb-4aca-9c4e-9f6c16ac5490 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.584749] env[69328]: DEBUG oslo_vmware.api [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Waiting for the task: (returnval){ [ 855.584749] env[69328]: value = "task-3273424" [ 855.584749] env[69328]: _type = "Task" [ 855.584749] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.596856] env[69328]: DEBUG oslo_vmware.api [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': task-3273424, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.629878] env[69328]: DEBUG nova.network.neutron [req-cd65c8a6-c842-45bc-82a0-c7983e963eef req-418642f3-a71a-49d4-8585-3320db34e000 service nova] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Updated VIF entry in instance network info cache for port eebd5d04-278d-4e22-9e5d-df5ae37877cf. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 855.630377] env[69328]: DEBUG nova.network.neutron [req-cd65c8a6-c842-45bc-82a0-c7983e963eef req-418642f3-a71a-49d4-8585-3320db34e000 service nova] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Updating instance_info_cache with network_info: [{"id": "eebd5d04-278d-4e22-9e5d-df5ae37877cf", "address": "fa:16:3e:d8:d9:39", "network": {"id": "e2ab6957-2c9d-4b95-91bd-5a62d9a284ba", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-967470666-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75d5853e3c724d02bacfa75173e38ab3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeebd5d04-27", "ovs_interfaceid": "eebd5d04-278d-4e22-9e5d-df5ae37877cf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.806352] env[69328]: DEBUG nova.network.neutron [-] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.808121] env[69328]: DEBUG oslo_vmware.api [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52599386-d4da-0567-0f69-370130fcc88c, 'name': SearchDatastore_Task, 'duration_secs': 0.014838} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.808699] env[69328]: DEBUG oslo_concurrency.lockutils [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 855.808943] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 855.809205] env[69328]: DEBUG oslo_concurrency.lockutils [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.809367] env[69328]: DEBUG oslo_concurrency.lockutils [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 855.809580] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 855.809857] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f1341991-751b-45c6-8bca-04a38d86b264 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.820100] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 855.820428] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 855.822036] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0fcff76e-73f5-48fd-8b54-325305a677cb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.828893] env[69328]: DEBUG oslo_vmware.api [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 855.828893] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]526deefa-fc30-6358-1c7b-0c14d2b713e6" [ 855.828893] env[69328]: _type = "Task" [ 855.828893] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.838401] env[69328]: DEBUG oslo_vmware.api [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]526deefa-fc30-6358-1c7b-0c14d2b713e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.938175] env[69328]: DEBUG nova.network.neutron [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Successfully created port: 7748ad51-059a-4dd5-b929-13b3fbac9d5c {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 855.954341] env[69328]: DEBUG nova.compute.manager [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 856.096382] env[69328]: DEBUG oslo_vmware.api [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': task-3273424, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.225506} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.099665] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 856.100041] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 856.100307] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 856.134438] env[69328]: DEBUG oslo_concurrency.lockutils [req-cd65c8a6-c842-45bc-82a0-c7983e963eef req-418642f3-a71a-49d4-8585-3320db34e000 service nova] Releasing lock "refresh_cache-fd72bae3-cb72-48d0-a0df-9ea3a770a86c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 856.311397] env[69328]: INFO nova.compute.manager [-] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Took 1.03 seconds to deallocate network for instance. [ 856.344856] env[69328]: DEBUG oslo_vmware.api [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]526deefa-fc30-6358-1c7b-0c14d2b713e6, 'name': SearchDatastore_Task, 'duration_secs': 0.01698} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.345795] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0775b13d-4763-4a02-8544-ce1723f0d2d2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.357030] env[69328]: DEBUG oslo_vmware.api [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 856.357030] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52eebf25-f940-bdd5-2181-5c2e5565d9bd" [ 856.357030] env[69328]: _type = "Task" [ 856.357030] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.369360] env[69328]: DEBUG oslo_vmware.api [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52eebf25-f940-bdd5-2181-5c2e5565d9bd, 'name': SearchDatastore_Task, 'duration_secs': 0.010884} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.372629] env[69328]: DEBUG oslo_concurrency.lockutils [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 856.372745] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] fd72bae3-cb72-48d0-a0df-9ea3a770a86c/fd72bae3-cb72-48d0-a0df-9ea3a770a86c.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 856.374276] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8777104b-142a-4707-bfbc-fed73fd34106 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.382012] env[69328]: DEBUG oslo_vmware.api [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 856.382012] env[69328]: value = "task-3273425" [ 856.382012] env[69328]: _type = "Task" [ 856.382012] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.395060] env[69328]: DEBUG oslo_vmware.api [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273425, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.429718] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff8ffea0-98b4-4752-bf51-e77f52870f06 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.438031] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-198b1f25-3fdf-483b-8f75-1b777e6cd9a8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.479122] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b58465fe-1766-4d6f-8d37-376b854ce9b9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.488814] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9de8a935-6540-48bd-b31f-fdd05c78bf50 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.508761] env[69328]: DEBUG nova.compute.provider_tree [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 856.821438] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e810f54a-ff1a-4673-a691-033e65f1d8b1 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 856.894665] env[69328]: DEBUG oslo_vmware.api [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273425, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.980402] env[69328]: DEBUG nova.compute.manager [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 857.012762] env[69328]: DEBUG nova.scheduler.client.report [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 857.018227] env[69328]: DEBUG nova.virt.hardware [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 857.018533] env[69328]: DEBUG nova.virt.hardware [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 857.018656] env[69328]: DEBUG nova.virt.hardware [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 857.018782] env[69328]: DEBUG nova.virt.hardware [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 857.019090] env[69328]: DEBUG nova.virt.hardware [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 857.019253] env[69328]: DEBUG nova.virt.hardware [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 857.019492] env[69328]: DEBUG nova.virt.hardware [None 
req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 857.019658] env[69328]: DEBUG nova.virt.hardware [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 857.019825] env[69328]: DEBUG nova.virt.hardware [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 857.020020] env[69328]: DEBUG nova.virt.hardware [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 857.020203] env[69328]: DEBUG nova.virt.hardware [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 857.021074] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ccbb1e3-ddbd-4571-ae64-f06eb3bbbac8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.031601] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-810ecf4e-19d4-4369-a1cb-42d3bd38a3d4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.133459] env[69328]: DEBUG nova.virt.hardware [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 857.133709] env[69328]: DEBUG nova.virt.hardware [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Flavor limits 0:0:0 {{(pid=69328) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 857.133865] env[69328]: DEBUG nova.virt.hardware [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 857.134144] env[69328]: DEBUG nova.virt.hardware [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 857.134335] env[69328]: DEBUG nova.virt.hardware [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 857.134540] env[69328]: DEBUG nova.virt.hardware [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 857.134918] env[69328]: DEBUG nova.virt.hardware [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 857.135195] env[69328]: DEBUG nova.virt.hardware [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 857.135411] env[69328]: DEBUG nova.virt.hardware [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 857.135623] env[69328]: DEBUG nova.virt.hardware [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 857.135826] env[69328]: DEBUG nova.virt.hardware [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 857.136940] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9128d43b-7583-41e5-98b9-25e8c99e0603 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.145825] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61b69401-6d07-4031-8f42-fe25a7c8f02a 
{{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.166878] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Instance VIF info [] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 857.173107] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 857.173534] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 857.174489] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9217cc35-d2d3-4b63-9d4a-eb5f26c2cec4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.194026] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 857.194026] env[69328]: value = "task-3273426" [ 857.194026] env[69328]: _type = "Task" [ 857.194026] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.202360] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273426, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.402426] env[69328]: DEBUG oslo_vmware.api [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273425, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.495508] env[69328]: DEBUG nova.compute.manager [req-1e399f01-a998-4d3f-99b3-25f7f283a614 req-d79359e0-1fe7-4980-a59f-be9055659ed8 service nova] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Received event network-vif-plugged-7748ad51-059a-4dd5-b929-13b3fbac9d5c {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 857.495736] env[69328]: DEBUG oslo_concurrency.lockutils [req-1e399f01-a998-4d3f-99b3-25f7f283a614 req-d79359e0-1fe7-4980-a59f-be9055659ed8 service nova] Acquiring lock "55d9ba65-e5c8-446a-a209-a840f30ff02c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 857.495951] env[69328]: DEBUG oslo_concurrency.lockutils [req-1e399f01-a998-4d3f-99b3-25f7f283a614 req-d79359e0-1fe7-4980-a59f-be9055659ed8 service nova] Lock "55d9ba65-e5c8-446a-a209-a840f30ff02c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 857.496137] env[69328]: DEBUG oslo_concurrency.lockutils [req-1e399f01-a998-4d3f-99b3-25f7f283a614 req-d79359e0-1fe7-4980-a59f-be9055659ed8 service nova] Lock "55d9ba65-e5c8-446a-a209-a840f30ff02c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 857.496494] env[69328]: DEBUG nova.compute.manager [req-1e399f01-a998-4d3f-99b3-25f7f283a614 req-d79359e0-1fe7-4980-a59f-be9055659ed8 service nova] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] No waiting events found dispatching network-vif-plugged-7748ad51-059a-4dd5-b929-13b3fbac9d5c {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 857.496605] env[69328]: WARNING nova.compute.manager [req-1e399f01-a998-4d3f-99b3-25f7f283a614 req-d79359e0-1fe7-4980-a59f-be9055659ed8 service nova] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Received unexpected event network-vif-plugged-7748ad51-059a-4dd5-b929-13b3fbac9d5c for instance with vm_state building and task_state spawning. [ 857.525877] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.635s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 857.526540] env[69328]: DEBUG nova.compute.manager [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 857.531589] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 33.335s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 857.531589] env[69328]: DEBUG nova.objects.instance [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69328) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 857.613249] env[69328]: DEBUG nova.network.neutron [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Successfully updated port: 7748ad51-059a-4dd5-b929-13b3fbac9d5c {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 857.705355] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273426, 'name': CreateVM_Task, 'duration_secs': 0.355563} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.705547] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 857.705996] env[69328]: DEBUG oslo_concurrency.lockutils [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.706175] env[69328]: DEBUG oslo_concurrency.lockutils [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 857.706501] env[69328]: DEBUG oslo_concurrency.lockutils [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 857.706787] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f4990a5-705b-4d33-b106-eb7ef48569bb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.712568] env[69328]: DEBUG oslo_vmware.api [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Waiting for the task: (returnval){ [ 857.712568] env[69328]: value = 
"session[527fc292-6741-f48d-313f-2d0c02ad0f69]5256a0c5-5095-686d-6214-109f5e5f89f9" [ 857.712568] env[69328]: _type = "Task" [ 857.712568] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.723074] env[69328]: DEBUG oslo_vmware.api [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5256a0c5-5095-686d-6214-109f5e5f89f9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.899656] env[69328]: DEBUG oslo_vmware.api [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273425, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.036615] env[69328]: DEBUG nova.compute.utils [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 858.044215] env[69328]: DEBUG nova.compute.manager [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 858.044412] env[69328]: DEBUG nova.network.neutron [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 858.091386] env[69328]: DEBUG nova.policy [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a8fbe2a134194d29af48ac8e4986d0cb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd86de4d5055642aa86a29c6768e3db46', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 858.117113] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Acquiring lock "refresh_cache-55d9ba65-e5c8-446a-a209-a840f30ff02c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 858.117252] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Acquired lock "refresh_cache-55d9ba65-e5c8-446a-a209-a840f30ff02c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 
858.117445] env[69328]: DEBUG nova.network.neutron [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 858.224133] env[69328]: DEBUG oslo_vmware.api [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5256a0c5-5095-686d-6214-109f5e5f89f9, 'name': SearchDatastore_Task, 'duration_secs': 0.014482} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.224515] env[69328]: DEBUG oslo_concurrency.lockutils [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 858.224798] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 858.225126] env[69328]: DEBUG oslo_concurrency.lockutils [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 858.225310] env[69328]: DEBUG oslo_concurrency.lockutils [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 858.225520] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 858.225845] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f4a8589f-3feb-4f9e-8475-a5c5fd4253c6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.236213] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 858.236441] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None 
req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 858.237242] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17a2b7be-2009-4121-8aa1-d0c54e3cb34e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.243720] env[69328]: DEBUG oslo_vmware.api [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Waiting for the task: (returnval){ [ 858.243720] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52cd6ff2-1b10-6e82-4f91-2e6f7fb32b84" [ 858.243720] env[69328]: _type = "Task" [ 858.243720] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.253427] env[69328]: DEBUG oslo_vmware.api [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52cd6ff2-1b10-6e82-4f91-2e6f7fb32b84, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.399778] env[69328]: DEBUG oslo_vmware.api [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273425, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.553238} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.400163] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] fd72bae3-cb72-48d0-a0df-9ea3a770a86c/fd72bae3-cb72-48d0-a0df-9ea3a770a86c.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 858.401669] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 858.404349] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dc25e4e9-64ea-4b1b-bf51-8047a056b0e6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.413533] env[69328]: DEBUG oslo_vmware.api [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 858.413533] env[69328]: value = "task-3273427" [ 858.413533] env[69328]: _type = "Task" [ 858.413533] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.423992] env[69328]: DEBUG oslo_vmware.api [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273427, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.485191] env[69328]: DEBUG nova.network.neutron [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Successfully created port: e5103faf-fa4a-4715-b9eb-8469ebb32b28 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 858.544677] env[69328]: DEBUG nova.compute.manager [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 858.550179] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4995a9b6-2110-47f1-b746-f197182cc9ef tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.019s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 858.550495] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2e298255-278a-43c8-b6fe-8da33338788a tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.496s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 858.550550] env[69328]: DEBUG nova.objects.instance [None req-2e298255-278a-43c8-b6fe-8da33338788a tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Lazy-loading 'resources' on Instance uuid ef7effe4-b37f-4fab-ad24-9d8f72a47ee2 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 858.756185] env[69328]: DEBUG oslo_vmware.api [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52cd6ff2-1b10-6e82-4f91-2e6f7fb32b84, 'name': SearchDatastore_Task, 'duration_secs': 0.014068} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.756819] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35e9754a-6ce3-4613-84dd-19aa39692f65 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.764864] env[69328]: DEBUG oslo_vmware.api [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Waiting for the task: (returnval){ [ 858.764864] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52783b8f-9bab-a5f0-7882-ac449878222e" [ 858.764864] env[69328]: _type = "Task" [ 858.764864] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.769780] env[69328]: DEBUG nova.network.neutron [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 858.784734] env[69328]: DEBUG oslo_vmware.api [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52783b8f-9bab-a5f0-7882-ac449878222e, 'name': SearchDatastore_Task, 'duration_secs': 0.011496} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.784734] env[69328]: DEBUG oslo_concurrency.lockutils [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 858.784734] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] e5a2de79-cfbc-4d9c-8b58-5aa819657978/e5a2de79-cfbc-4d9c-8b58-5aa819657978.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 858.784734] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4222bc00-47b9-4b6c-a525-991152c6ae2c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.794486] env[69328]: DEBUG oslo_vmware.api [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Waiting for the task: (returnval){ [ 858.794486] env[69328]: value = "task-3273428" [ 858.794486] env[69328]: _type = "Task" [ 858.794486] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.806629] env[69328]: DEBUG oslo_vmware.api [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': task-3273428, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.928624] env[69328]: DEBUG oslo_vmware.api [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273427, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081647} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.928624] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 858.929052] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97c5c3cb-0b0c-4279-bc7e-720959c1d235 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.957853] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Reconfiguring VM instance instance-0000003e to attach disk [datastore2] fd72bae3-cb72-48d0-a0df-9ea3a770a86c/fd72bae3-cb72-48d0-a0df-9ea3a770a86c.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 858.958234] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7d9aa74e-99f9-461f-ac67-1097ca4b63fd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.976061] env[69328]: DEBUG nova.network.neutron [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Updating instance_info_cache with network_info: [{"id": "7748ad51-059a-4dd5-b929-13b3fbac9d5c", "address": "fa:16:3e:19:7f:27", "network": {"id": "3ab35a35-3138-447c-be5b-076dd28b923b", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-953121151-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ff1b8afdf994141b20b41c0a4088101", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap7748ad51-05", "ovs_interfaceid": "7748ad51-059a-4dd5-b929-13b3fbac9d5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.984323] env[69328]: DEBUG oslo_vmware.api [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 858.984323] env[69328]: value = "task-3273429" [ 858.984323] env[69328]: _type = "Task" [ 858.984323] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.993706] env[69328]: DEBUG oslo_vmware.api [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273429, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.308025] env[69328]: DEBUG oslo_vmware.api [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': task-3273428, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.482803] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Releasing lock "refresh_cache-55d9ba65-e5c8-446a-a209-a840f30ff02c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 859.483882] env[69328]: DEBUG nova.compute.manager [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Instance network_info: |[{"id": "7748ad51-059a-4dd5-b929-13b3fbac9d5c", "address": "fa:16:3e:19:7f:27", "network": {"id": "3ab35a35-3138-447c-be5b-076dd28b923b", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-953121151-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ff1b8afdf994141b20b41c0a4088101", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7748ad51-05", "ovs_interfaceid": "7748ad51-059a-4dd5-b929-13b3fbac9d5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 859.483882] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac 
tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:19:7f:27', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a316376e-2ef0-4b1e-b40c-10321ebd7e1a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7748ad51-059a-4dd5-b929-13b3fbac9d5c', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 859.492834] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Creating folder: Project (1ff1b8afdf994141b20b41c0a4088101). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 859.493272] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c9a2e766-e69f-41ea-ac50-4e255809016a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.507329] env[69328]: DEBUG oslo_vmware.api [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273429, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.514904] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Created folder: Project (1ff1b8afdf994141b20b41c0a4088101) in parent group-v653649. [ 859.514904] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Creating folder: Instances. Parent ref: group-v653833. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 859.514904] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b1c58a99-8035-4876-bdd1-969038a6cd17 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.516560] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceb4982b-0603-4261-9136-8345bf63ad2f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.525164] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05b4d408-ed8d-4e5d-b106-6933baee1373 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.529610] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Created folder: Instances in parent group-v653833. [ 859.529858] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 859.530470] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 859.530714] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5e417407-e59f-4544-94b8-dfa3137c5e79 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.572948] env[69328]: DEBUG nova.compute.manager [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 859.576915] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d28ae9c-0041-4a72-80b8-dfe2afece2a9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.582077] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 859.582077] env[69328]: value = "task-3273432" [ 859.582077] env[69328]: _type = "Task" [ 859.582077] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.588233] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ce20bd4-a807-47ee-89a4-40331f0c53ba {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.600224] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273432, 'name': CreateVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.602640] env[69328]: DEBUG nova.virt.hardware [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 859.602890] env[69328]: DEBUG nova.virt.hardware [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 859.603070] env[69328]: DEBUG nova.virt.hardware [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 859.603262] env[69328]: DEBUG nova.virt.hardware [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 859.603409] env[69328]: DEBUG nova.virt.hardware [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 859.603556] env[69328]: DEBUG nova.virt.hardware [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 859.603778] env[69328]: DEBUG nova.virt.hardware [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 859.603931] env[69328]: DEBUG nova.virt.hardware [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 859.604136] env[69328]: DEBUG nova.virt.hardware [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 
tempest-ServersTestJSON-858906230-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 859.604287] env[69328]: DEBUG nova.virt.hardware [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 859.604466] env[69328]: DEBUG nova.virt.hardware [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 859.616790] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd9018be-9e76-4b42-9e25-fa58fe821050 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.620008] env[69328]: DEBUG nova.compute.provider_tree [None req-2e298255-278a-43c8-b6fe-8da33338788a tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 859.627942] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a88707e3-9e26-4751-8154-77d144bf10cd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.803950] env[69328]: DEBUG nova.compute.manager [req-7fe5833d-6558-4a01-8b8d-85ef086fa37b req-5ca4ead3-4bc7-47fd-b141-3e1d7126277d service nova] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Received event network-changed-7748ad51-059a-4dd5-b929-13b3fbac9d5c {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 859.804219] env[69328]: DEBUG nova.compute.manager [req-7fe5833d-6558-4a01-8b8d-85ef086fa37b req-5ca4ead3-4bc7-47fd-b141-3e1d7126277d service nova] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Refreshing instance network info cache due to event network-changed-7748ad51-059a-4dd5-b929-13b3fbac9d5c. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 859.804514] env[69328]: DEBUG oslo_concurrency.lockutils [req-7fe5833d-6558-4a01-8b8d-85ef086fa37b req-5ca4ead3-4bc7-47fd-b141-3e1d7126277d service nova] Acquiring lock "refresh_cache-55d9ba65-e5c8-446a-a209-a840f30ff02c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.804702] env[69328]: DEBUG oslo_concurrency.lockutils [req-7fe5833d-6558-4a01-8b8d-85ef086fa37b req-5ca4ead3-4bc7-47fd-b141-3e1d7126277d service nova] Acquired lock "refresh_cache-55d9ba65-e5c8-446a-a209-a840f30ff02c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 859.804977] env[69328]: DEBUG nova.network.neutron [req-7fe5833d-6558-4a01-8b8d-85ef086fa37b req-5ca4ead3-4bc7-47fd-b141-3e1d7126277d service nova] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Refreshing network info cache for port 7748ad51-059a-4dd5-b929-13b3fbac9d5c {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 859.813058] env[69328]: DEBUG oslo_vmware.api [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': task-3273428, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.551779} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.813733] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] e5a2de79-cfbc-4d9c-8b58-5aa819657978/e5a2de79-cfbc-4d9c-8b58-5aa819657978.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 859.813993] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 859.814265] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-50bc45af-11d6-4459-b336-fe23a892acf9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.822828] env[69328]: DEBUG oslo_vmware.api [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Waiting for the task: (returnval){ [ 859.822828] env[69328]: value = "task-3273433" [ 859.822828] env[69328]: _type = "Task" [ 859.822828] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.834752] env[69328]: DEBUG oslo_vmware.api [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': task-3273433, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.003917] env[69328]: DEBUG oslo_vmware.api [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273429, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.094475] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273432, 'name': CreateVM_Task} progress is 25%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.125970] env[69328]: DEBUG nova.scheduler.client.report [None req-2e298255-278a-43c8-b6fe-8da33338788a tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 860.246947] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Acquiring lock "a0952fdf-5570-4112-bc4d-e9f9cee1599c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 860.247229] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Lock "a0952fdf-5570-4112-bc4d-e9f9cee1599c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 860.334400] env[69328]: DEBUG oslo_vmware.api [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': task-3273433, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066949} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.334400] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 860.335231] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78ff8047-b482-4653-a991-e13efad881d7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.361033] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Reconfiguring VM instance instance-0000003d to attach disk [datastore2] e5a2de79-cfbc-4d9c-8b58-5aa819657978/e5a2de79-cfbc-4d9c-8b58-5aa819657978.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 860.361957] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c81b977d-c79b-454a-a4df-5c821e7d80c8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.388417] env[69328]: DEBUG oslo_vmware.api [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Waiting for the task: (returnval){ [ 860.388417] env[69328]: value = "task-3273434" [ 860.388417] env[69328]: _type = "Task" [ 860.388417] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.397846] env[69328]: DEBUG oslo_vmware.api [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': task-3273434, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.477341] env[69328]: DEBUG nova.network.neutron [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Successfully updated port: e5103faf-fa4a-4715-b9eb-8469ebb32b28 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 860.511780] env[69328]: DEBUG oslo_vmware.api [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273429, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.597780] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273432, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.598866] env[69328]: DEBUG nova.network.neutron [req-7fe5833d-6558-4a01-8b8d-85ef086fa37b req-5ca4ead3-4bc7-47fd-b141-3e1d7126277d service nova] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Updated VIF entry in instance network info cache for port 7748ad51-059a-4dd5-b929-13b3fbac9d5c. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 860.599193] env[69328]: DEBUG nova.network.neutron [req-7fe5833d-6558-4a01-8b8d-85ef086fa37b req-5ca4ead3-4bc7-47fd-b141-3e1d7126277d service nova] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Updating instance_info_cache with network_info: [{"id": "7748ad51-059a-4dd5-b929-13b3fbac9d5c", "address": "fa:16:3e:19:7f:27", "network": {"id": "3ab35a35-3138-447c-be5b-076dd28b923b", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-953121151-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ff1b8afdf994141b20b41c0a4088101", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7748ad51-05", "ovs_interfaceid": "7748ad51-059a-4dd5-b929-13b3fbac9d5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 860.633071] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2e298255-278a-43c8-b6fe-8da33338788a tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.079s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 860.633071] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b57174cd-2d79-439f-8684-07cfdbfd919b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.720s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 860.633071] env[69328]: DEBUG nova.objects.instance [None req-b57174cd-2d79-439f-8684-07cfdbfd919b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lazy-loading 'resources' on Instance uuid 55f44102-2891-4b6c-b31e-e8255a24d180 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 860.754023] env[69328]: DEBUG nova.compute.manager [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Starting instance... 
{{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 860.783700] env[69328]: INFO nova.scheduler.client.report [None req-2e298255-278a-43c8-b6fe-8da33338788a tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Deleted allocations for instance ef7effe4-b37f-4fab-ad24-9d8f72a47ee2 [ 860.901766] env[69328]: DEBUG oslo_vmware.api [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': task-3273434, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.982602] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "refresh_cache-a0b663eb-31b0-4de1-94bc-660a7d9c1c7b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 860.983458] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquired lock "refresh_cache-a0b663eb-31b0-4de1-94bc-660a7d9c1c7b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 860.983458] env[69328]: DEBUG nova.network.neutron [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 861.005637] env[69328]: DEBUG oslo_vmware.api [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273429, 'name': ReconfigVM_Task, 'duration_secs': 1.637171} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.006118] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Reconfigured VM instance instance-0000003e to attach disk [datastore2] fd72bae3-cb72-48d0-a0df-9ea3a770a86c/fd72bae3-cb72-48d0-a0df-9ea3a770a86c.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 861.006940] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-36acc1f8-628d-44b2-9349-55ae92abc238 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.015774] env[69328]: DEBUG oslo_vmware.api [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 861.015774] env[69328]: value = "task-3273435" [ 861.015774] env[69328]: _type = "Task" [ 861.015774] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.030708] env[69328]: DEBUG oslo_vmware.api [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273435, 'name': Rename_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.096996] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273432, 'name': CreateVM_Task, 'duration_secs': 1.348219} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.097235] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 861.097952] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.098150] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 861.098489] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 861.098811] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70f5fa15-ce38-42bc-a506-6d3b696571ce {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.102282] env[69328]: DEBUG oslo_concurrency.lockutils [req-7fe5833d-6558-4a01-8b8d-85ef086fa37b req-5ca4ead3-4bc7-47fd-b141-3e1d7126277d service nova] Releasing lock "refresh_cache-55d9ba65-e5c8-446a-a209-a840f30ff02c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 861.104302] env[69328]: DEBUG oslo_vmware.api [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Waiting for the task: (returnval){ [ 861.104302] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ce57c8-1606-f4b7-cf71-99ee315a7ef2" [ 861.104302] env[69328]: _type = "Task" [ 861.104302] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.112596] env[69328]: DEBUG oslo_vmware.api [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ce57c8-1606-f4b7-cf71-99ee315a7ef2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.277776] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 861.296910] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2e298255-278a-43c8-b6fe-8da33338788a tempest-AttachInterfacesV270Test-432502271 tempest-AttachInterfacesV270Test-432502271-project-member] Lock "ef7effe4-b37f-4fab-ad24-9d8f72a47ee2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.879s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 861.399172] env[69328]: DEBUG oslo_vmware.api [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': task-3273434, 'name': ReconfigVM_Task, 'duration_secs': 0.584763} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.401959] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Reconfigured VM instance instance-0000003d to attach disk [datastore2] e5a2de79-cfbc-4d9c-8b58-5aa819657978/e5a2de79-cfbc-4d9c-8b58-5aa819657978.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 861.403033] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-26067994-8bb3-4bf7-9cdd-8f8f4fce0315 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.411111] env[69328]: DEBUG oslo_vmware.api [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Waiting for the task: (returnval){ [ 861.411111] env[69328]: value = "task-3273436" [ 861.411111] env[69328]: _type = "Task" [ 861.411111] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.423546] env[69328]: DEBUG oslo_vmware.api [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': task-3273436, 'name': Rename_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.521957] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b21071e-d7bc-4a16-8eec-6876e5c27080 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.530671] env[69328]: DEBUG oslo_vmware.api [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273435, 'name': Rename_Task, 'duration_secs': 0.175404} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.531539] env[69328]: DEBUG nova.network.neutron [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 861.535415] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 861.535758] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e635aad8-1a86-4712-8f02-0dc9a6ac048b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.538476] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f829ee63-51b8-400a-81ce-dd52c7de78e0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.577289] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfa67756-a4e5-4b87-9592-4319f195172e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.580137] env[69328]: DEBUG oslo_vmware.api [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 861.580137] env[69328]: value = "task-3273437" [ 861.580137] env[69328]: _type = "Task" [ 861.580137] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.592027] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab276f1b-507b-4ea6-94d5-d0b9e24c8d8d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.596929] env[69328]: DEBUG oslo_vmware.api [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273437, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.609023] env[69328]: DEBUG nova.compute.provider_tree [None req-b57174cd-2d79-439f-8684-07cfdbfd919b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 861.625121] env[69328]: DEBUG oslo_vmware.api [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ce57c8-1606-f4b7-cf71-99ee315a7ef2, 'name': SearchDatastore_Task, 'duration_secs': 0.015678} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.625497] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 861.625700] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 861.625955] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.626122] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 861.626349] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 861.626629] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4f2b941e-e821-4db6-b0ae-fce736891a59 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.641471] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 861.641695] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 861.642650] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df493734-7568-4813-acbd-43d81d41ce23 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.650844] env[69328]: DEBUG oslo_vmware.api [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Waiting for the task: (returnval){ [ 861.650844] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e218cf-650c-8858-003a-1b4758302c97" [ 861.650844] env[69328]: _type = "Task" [ 861.650844] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.660765] env[69328]: DEBUG oslo_vmware.api [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e218cf-650c-8858-003a-1b4758302c97, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.829874] env[69328]: DEBUG nova.compute.manager [req-f9f8821f-09f5-4de8-848b-8c8a7031f2dd req-f6825f7a-640d-494d-b531-127d7afedee9 service nova] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Received event network-vif-plugged-e5103faf-fa4a-4715-b9eb-8469ebb32b28 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 861.830128] env[69328]: DEBUG oslo_concurrency.lockutils [req-f9f8821f-09f5-4de8-848b-8c8a7031f2dd req-f6825f7a-640d-494d-b531-127d7afedee9 service nova] Acquiring lock "a0b663eb-31b0-4de1-94bc-660a7d9c1c7b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 861.830343] env[69328]: DEBUG oslo_concurrency.lockutils [req-f9f8821f-09f5-4de8-848b-8c8a7031f2dd req-f6825f7a-640d-494d-b531-127d7afedee9 service nova] Lock "a0b663eb-31b0-4de1-94bc-660a7d9c1c7b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 861.830551] env[69328]: DEBUG oslo_concurrency.lockutils [req-f9f8821f-09f5-4de8-848b-8c8a7031f2dd req-f6825f7a-640d-494d-b531-127d7afedee9 service nova] Lock "a0b663eb-31b0-4de1-94bc-660a7d9c1c7b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 861.830686] env[69328]: DEBUG nova.compute.manager [req-f9f8821f-09f5-4de8-848b-8c8a7031f2dd req-f6825f7a-640d-494d-b531-127d7afedee9 service nova] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] No waiting events found dispatching network-vif-plugged-e5103faf-fa4a-4715-b9eb-8469ebb32b28 {{(pid=69328) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 861.830853] env[69328]: WARNING nova.compute.manager [req-f9f8821f-09f5-4de8-848b-8c8a7031f2dd req-f6825f7a-640d-494d-b531-127d7afedee9 service nova] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Received unexpected event network-vif-plugged-e5103faf-fa4a-4715-b9eb-8469ebb32b28 for instance with vm_state building and task_state spawning. [ 861.831025] env[69328]: DEBUG nova.compute.manager [req-f9f8821f-09f5-4de8-848b-8c8a7031f2dd req-f6825f7a-640d-494d-b531-127d7afedee9 service nova] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Received event network-changed-e5103faf-fa4a-4715-b9eb-8469ebb32b28 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 861.831557] env[69328]: DEBUG nova.compute.manager [req-f9f8821f-09f5-4de8-848b-8c8a7031f2dd req-f6825f7a-640d-494d-b531-127d7afedee9 service nova] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Refreshing instance network info cache due to event network-changed-e5103faf-fa4a-4715-b9eb-8469ebb32b28. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 861.831557] env[69328]: DEBUG oslo_concurrency.lockutils [req-f9f8821f-09f5-4de8-848b-8c8a7031f2dd req-f6825f7a-640d-494d-b531-127d7afedee9 service nova] Acquiring lock "refresh_cache-a0b663eb-31b0-4de1-94bc-660a7d9c1c7b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.834219] env[69328]: DEBUG nova.network.neutron [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Updating instance_info_cache with network_info: [{"id": "e5103faf-fa4a-4715-b9eb-8469ebb32b28", "address": "fa:16:3e:4a:bc:cf", "network": {"id": "7f5dcab4-3cec-42a1-b589-88cb373af645", "bridge": "br-int", "label": "tempest-ServersTestJSON-1833802368-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d86de4d5055642aa86a29c6768e3db46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b94712a6-b777-47dd-bc06-f9acfce2d936", "external-id": "nsx-vlan-transportzone-494", "segmentation_id": 494, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5103faf-fa", "ovs_interfaceid": "e5103faf-fa4a-4715-b9eb-8469ebb32b28", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 861.923780] env[69328]: DEBUG oslo_vmware.api [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': task-3273436, 'name': Rename_Task, 'duration_secs': 0.216742} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.924917] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 861.924917] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-521dfcbf-04bc-4f58-b995-c7dc461e126f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.932489] env[69328]: DEBUG oslo_vmware.api [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Waiting for the task: (returnval){ [ 861.932489] env[69328]: value = "task-3273438" [ 861.932489] env[69328]: _type = "Task" [ 861.932489] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.942805] env[69328]: DEBUG oslo_vmware.api [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': task-3273438, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.094348] env[69328]: DEBUG oslo_vmware.api [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273437, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.115765] env[69328]: DEBUG nova.scheduler.client.report [None req-b57174cd-2d79-439f-8684-07cfdbfd919b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 862.165027] env[69328]: DEBUG oslo_vmware.api [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e218cf-650c-8858-003a-1b4758302c97, 'name': SearchDatastore_Task, 'duration_secs': 0.012787} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.165939] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b79c6aa4-9446-48b4-924d-8b759ac036e3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.172669] env[69328]: DEBUG oslo_vmware.api [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Waiting for the task: (returnval){ [ 862.172669] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e6fef9-d1ee-4de4-57d4-fa7de1b3faf2" [ 862.172669] env[69328]: _type = "Task" [ 862.172669] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.184184] env[69328]: DEBUG oslo_vmware.api [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e6fef9-d1ee-4de4-57d4-fa7de1b3faf2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.336367] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Releasing lock "refresh_cache-a0b663eb-31b0-4de1-94bc-660a7d9c1c7b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 862.336727] env[69328]: DEBUG nova.compute.manager [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Instance network_info: |[{"id": "e5103faf-fa4a-4715-b9eb-8469ebb32b28", "address": "fa:16:3e:4a:bc:cf", "network": {"id": "7f5dcab4-3cec-42a1-b589-88cb373af645", "bridge": "br-int", "label": "tempest-ServersTestJSON-1833802368-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d86de4d5055642aa86a29c6768e3db46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b94712a6-b777-47dd-bc06-f9acfce2d936", "external-id": "nsx-vlan-transportzone-494", "segmentation_id": 494, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5103faf-fa", "ovs_interfaceid": "e5103faf-fa4a-4715-b9eb-8469ebb32b28", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 862.337067] env[69328]: DEBUG oslo_concurrency.lockutils [req-f9f8821f-09f5-4de8-848b-8c8a7031f2dd req-f6825f7a-640d-494d-b531-127d7afedee9 service nova] Acquired lock "refresh_cache-a0b663eb-31b0-4de1-94bc-660a7d9c1c7b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 
862.337280] env[69328]: DEBUG nova.network.neutron [req-f9f8821f-09f5-4de8-848b-8c8a7031f2dd req-f6825f7a-640d-494d-b531-127d7afedee9 service nova] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Refreshing network info cache for port e5103faf-fa4a-4715-b9eb-8469ebb32b28 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 862.338588] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4a:bc:cf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b94712a6-b777-47dd-bc06-f9acfce2d936', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e5103faf-fa4a-4715-b9eb-8469ebb32b28', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 862.346411] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Creating folder: Project (d86de4d5055642aa86a29c6768e3db46). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 862.347030] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9e5dbaf8-e940-4bf5-8b17-b46973eb976e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.360217] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Created folder: Project (d86de4d5055642aa86a29c6768e3db46) in parent group-v653649. [ 862.360446] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Creating folder: Instances. Parent ref: group-v653836. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 862.361070] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c2690ba2-03f8-4223-8dc8-6af1bb7917a5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.373080] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Created folder: Instances in parent group-v653836. [ 862.373080] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 862.374561] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 862.374834] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-48fdb519-e5d2-471a-be04-0f6408cb86cc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.403470] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 862.403470] env[69328]: value = "task-3273441" [ 862.403470] env[69328]: _type = "Task" [ 862.403470] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.413567] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273441, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.443351] env[69328]: DEBUG oslo_vmware.api [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': task-3273438, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.493141] env[69328]: DEBUG oslo_vmware.rw_handles [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52afc428-3bf1-d962-d483-40ef1c558110/disk-0.vmdk. {{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 862.494390] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0b4b96b-f608-43cb-b143-6757c03819d7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.501936] env[69328]: DEBUG oslo_vmware.rw_handles [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52afc428-3bf1-d962-d483-40ef1c558110/disk-0.vmdk is in state: ready. {{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 862.502291] env[69328]: ERROR oslo_vmware.rw_handles [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52afc428-3bf1-d962-d483-40ef1c558110/disk-0.vmdk due to incomplete transfer. 
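The entries around this point repeat one pattern over and over: an oslo.vmware call (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, CreateVM_Task, PowerOnVM_Task, ...) returns a vCenter task reference, the caller blocks in wait_for_task, and _poll_task logs "progress is N%" until the task is reported as "completed successfully". The snippet below is a minimal, self-contained sketch of that polling loop for illustration only; it is not the oslo.vmware implementation, and poll_progress is a hypothetical stand-in for whatever actually queries the vCenter task's state and progress.

    # Illustrative sketch only -- mimics the "Waiting for the task" /
    # "_poll_task ... progress is N%" / "completed successfully" cycle seen
    # in the log above. Not the oslo.vmware code; poll_progress is hypothetical.
    import time

    def wait_for_task(poll_progress, poll_interval=0.5, timeout=300):
        """Poll a task until it reports success, error, or the timeout expires.

        poll_progress: callable returning (state, progress) tuples,
                       e.g. ("running", 25) or ("success", 100).
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, progress = poll_progress()
            print("progress is %d%%" % progress)   # mirrors the _poll_task log lines
            if state == "success":
                return                             # task completed successfully
            if state == "error":
                raise RuntimeError("task failed")
            time.sleep(poll_interval)              # wait before polling again
        raise TimeoutError("task did not complete in time")

    # Example: a fake task that finishes after three polls.
    _states = iter([("running", 0), ("running", 66), ("success", 100)])
    wait_for_task(lambda: next(_states))

In the real service the loop runs inside the driver's API session, which is why the same request ID appears on the "Waiting for the task" line and on each subsequent progress line until completion.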
[ 862.502625] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-97b0e745-3691-4466-b98a-db81bb345ca5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.510952] env[69328]: DEBUG oslo_vmware.rw_handles [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52afc428-3bf1-d962-d483-40ef1c558110/disk-0.vmdk. {{(pid=69328) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 862.511348] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Uploaded image bbfa0ebf-9220-4057-9a85-eb18d82d6eae to the Glance image server {{(pid=69328) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 862.513875] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Destroying the VM {{(pid=69328) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 862.514227] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-8f8892bc-4ab5-490d-93d7-a5e3085628c9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.524020] env[69328]: DEBUG oslo_vmware.api [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 862.524020] env[69328]: value = "task-3273442" [ 862.524020] env[69328]: _type = "Task" [ 862.524020] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.535110] env[69328]: DEBUG oslo_vmware.api [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273442, 'name': Destroy_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.591930] env[69328]: DEBUG oslo_vmware.api [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273437, 'name': PowerOnVM_Task, 'duration_secs': 0.738388} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.591930] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 862.591930] env[69328]: INFO nova.compute.manager [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Took 10.21 seconds to spawn the instance on the hypervisor. [ 862.591930] env[69328]: DEBUG nova.compute.manager [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 862.592403] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b1bb99b-d5e9-4930-8e5b-fc49c9426594 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.620944] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b57174cd-2d79-439f-8684-07cfdbfd919b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.989s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 862.622709] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.248s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 862.628904] env[69328]: INFO nova.compute.claims [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 862.686314] env[69328]: DEBUG oslo_vmware.api [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e6fef9-d1ee-4de4-57d4-fa7de1b3faf2, 'name': SearchDatastore_Task, 'duration_secs': 0.018316} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.686586] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 862.686855] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 55d9ba65-e5c8-446a-a209-a840f30ff02c/55d9ba65-e5c8-446a-a209-a840f30ff02c.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 862.687141] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-013089f9-1393-4661-a7dd-58fba22885ae {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.695306] env[69328]: DEBUG oslo_vmware.api [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Waiting for the task: (returnval){ [ 862.695306] env[69328]: value = "task-3273443" [ 862.695306] env[69328]: _type = "Task" [ 862.695306] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.706467] env[69328]: DEBUG oslo_vmware.api [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Task: {'id': task-3273443, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.729909] env[69328]: INFO nova.scheduler.client.report [None req-b57174cd-2d79-439f-8684-07cfdbfd919b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Deleted allocations for instance 55f44102-2891-4b6c-b31e-e8255a24d180 [ 862.915528] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273441, 'name': CreateVM_Task, 'duration_secs': 0.382093} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.915528] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 862.915860] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.916036] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 862.916386] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 862.916698] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec09a129-0842-43b9-8dcb-124a35ab0a7e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.922518] env[69328]: DEBUG oslo_vmware.api [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 862.922518] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b4fbb1-b2c9-f0e1-63e6-5ab18febe627" [ 862.922518] env[69328]: _type = "Task" [ 862.922518] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.933546] env[69328]: DEBUG oslo_vmware.api [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b4fbb1-b2c9-f0e1-63e6-5ab18febe627, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.943141] env[69328]: DEBUG oslo_vmware.api [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': task-3273438, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.034660] env[69328]: DEBUG oslo_vmware.api [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273442, 'name': Destroy_Task} progress is 100%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.113765] env[69328]: INFO nova.compute.manager [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Took 47.34 seconds to build instance. [ 863.169128] env[69328]: DEBUG nova.network.neutron [req-f9f8821f-09f5-4de8-848b-8c8a7031f2dd req-f6825f7a-640d-494d-b531-127d7afedee9 service nova] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Updated VIF entry in instance network info cache for port e5103faf-fa4a-4715-b9eb-8469ebb32b28. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 863.169593] env[69328]: DEBUG nova.network.neutron [req-f9f8821f-09f5-4de8-848b-8c8a7031f2dd req-f6825f7a-640d-494d-b531-127d7afedee9 service nova] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Updating instance_info_cache with network_info: [{"id": "e5103faf-fa4a-4715-b9eb-8469ebb32b28", "address": "fa:16:3e:4a:bc:cf", "network": {"id": "7f5dcab4-3cec-42a1-b589-88cb373af645", "bridge": "br-int", "label": "tempest-ServersTestJSON-1833802368-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d86de4d5055642aa86a29c6768e3db46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b94712a6-b777-47dd-bc06-f9acfce2d936", "external-id": "nsx-vlan-transportzone-494", "segmentation_id": 494, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5103faf-fa", "ovs_interfaceid": "e5103faf-fa4a-4715-b9eb-8469ebb32b28", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.208692] env[69328]: DEBUG oslo_vmware.api [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Task: {'id': task-3273443, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.237562] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b57174cd-2d79-439f-8684-07cfdbfd919b tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "55f44102-2891-4b6c-b31e-e8255a24d180" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.233s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 863.433947] env[69328]: DEBUG oslo_vmware.api [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b4fbb1-b2c9-f0e1-63e6-5ab18febe627, 'name': SearchDatastore_Task, 'duration_secs': 0.067934} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.434374] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 863.434630] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 863.434872] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.435100] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 863.435314] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 863.438774] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-70180337-bb09-4111-b6c0-77b643a9f0b9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.447029] env[69328]: DEBUG oslo_vmware.api [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': task-3273438, 'name': PowerOnVM_Task, 'duration_secs': 1.452114} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.447494] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 863.447707] env[69328]: DEBUG nova.compute.manager [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 863.448062] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 863.448228] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 863.449407] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59516160-a38e-4452-8bbc-d1aecd463a9e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.451835] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2df4f1d4-2afa-45aa-b1af-a93737e34535 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.461155] env[69328]: DEBUG oslo_vmware.api [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 863.461155] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c61216-8c58-3339-88d5-8973c0b6d941" [ 863.461155] env[69328]: _type = "Task" [ 863.461155] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.470079] env[69328]: DEBUG oslo_vmware.api [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c61216-8c58-3339-88d5-8973c0b6d941, 'name': SearchDatastore_Task, 'duration_secs': 0.010449} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.470818] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-357d8fbe-2c15-40be-9aba-7c785580982c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.476019] env[69328]: DEBUG oslo_vmware.api [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 863.476019] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52be2101-dec5-f692-cf26-362612a56feb" [ 863.476019] env[69328]: _type = "Task" [ 863.476019] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.484553] env[69328]: DEBUG oslo_vmware.api [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52be2101-dec5-f692-cf26-362612a56feb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.532070] env[69328]: DEBUG oslo_vmware.api [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273442, 'name': Destroy_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.616463] env[69328]: DEBUG oslo_concurrency.lockutils [None req-656379df-1b50-4db2-b05b-f0dc5d2f4ff9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "fd72bae3-cb72-48d0-a0df-9ea3a770a86c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.282s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 863.675257] env[69328]: DEBUG oslo_concurrency.lockutils [req-f9f8821f-09f5-4de8-848b-8c8a7031f2dd req-f6825f7a-640d-494d-b531-127d7afedee9 service nova] Releasing lock "refresh_cache-a0b663eb-31b0-4de1-94bc-660a7d9c1c7b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 863.710405] env[69328]: DEBUG oslo_vmware.api [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Task: {'id': task-3273443, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.539606} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.710861] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 55d9ba65-e5c8-446a-a209-a840f30ff02c/55d9ba65-e5c8-446a-a209-a840f30ff02c.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 863.711058] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 863.711348] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5d9fbb3b-042f-4d3c-9245-c8e5e03307fc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.724153] env[69328]: DEBUG oslo_vmware.api [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Waiting for the task: (returnval){ [ 863.724153] env[69328]: value = "task-3273444" [ 863.724153] env[69328]: _type = "Task" [ 863.724153] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.736482] env[69328]: DEBUG oslo_vmware.api [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Task: {'id': task-3273444, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.975524] env[69328]: DEBUG oslo_concurrency.lockutils [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 863.986709] env[69328]: DEBUG oslo_vmware.api [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52be2101-dec5-f692-cf26-362612a56feb, 'name': SearchDatastore_Task, 'duration_secs': 0.010417} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.989035] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 863.989343] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] a0b663eb-31b0-4de1-94bc-660a7d9c1c7b/a0b663eb-31b0-4de1-94bc-660a7d9c1c7b.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 863.989779] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a7ff0fe3-a4b3-4315-82e0-251feab1b0ba {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.996623] env[69328]: DEBUG oslo_vmware.api [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 863.996623] env[69328]: value = "task-3273445" [ 863.996623] env[69328]: _type = "Task" [ 863.996623] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.008842] env[69328]: DEBUG oslo_vmware.api [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273445, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.035181] env[69328]: DEBUG oslo_vmware.api [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273442, 'name': Destroy_Task, 'duration_secs': 1.366004} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.037574] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Destroyed the VM [ 864.037744] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Deleting Snapshot of the VM instance {{(pid=69328) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 864.039550] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-0b5e0a84-f4f7-4fe2-9464-9657dcd50355 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.047494] env[69328]: DEBUG oslo_vmware.api [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 864.047494] env[69328]: value = "task-3273446" [ 864.047494] env[69328]: _type = "Task" [ 864.047494] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.060227] env[69328]: DEBUG oslo_vmware.api [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273446, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.102617] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a085275-7fb3-47f5-8909-a93a3d3a0f1f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.111207] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a67adfa-2ca9-47a7-86fa-49a0cf4d60eb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.147439] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc2a22f3-db0f-46fe-ac68-d46622f90546 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.157477] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38a5aca9-d179-467d-ab66-b09597a3e535 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.175168] env[69328]: DEBUG nova.compute.provider_tree [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 864.235948] env[69328]: DEBUG oslo_vmware.api [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Task: {'id': task-3273444, 
'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072234} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.236316] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 864.237217] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1ceca5e-977e-4576-a4b1-f8106de488af {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.266023] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Reconfiguring VM instance instance-0000003f to attach disk [datastore2] 55d9ba65-e5c8-446a-a209-a840f30ff02c/55d9ba65-e5c8-446a-a209-a840f30ff02c.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 864.266975] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7c651111-fe5b-4066-8157-c2b8002c189a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.294143] env[69328]: DEBUG oslo_vmware.api [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Waiting for the task: (returnval){ [ 864.294143] env[69328]: value = "task-3273447" [ 864.294143] env[69328]: _type = "Task" [ 864.294143] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.305274] env[69328]: DEBUG oslo_vmware.api [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Task: {'id': task-3273447, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.508481] env[69328]: DEBUG oslo_vmware.api [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273445, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.558511] env[69328]: DEBUG oslo_vmware.api [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273446, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.678623] env[69328]: DEBUG nova.scheduler.client.report [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 864.810411] env[69328]: DEBUG oslo_vmware.api [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Task: {'id': task-3273447, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.014026] env[69328]: DEBUG oslo_vmware.api [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273445, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.520331} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.014026] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] a0b663eb-31b0-4de1-94bc-660a7d9c1c7b/a0b663eb-31b0-4de1-94bc-660a7d9c1c7b.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 865.014026] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 865.014026] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-33c9406d-b33c-4f8e-af9d-bbcc22b62005 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.022063] env[69328]: DEBUG oslo_vmware.api [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 865.022063] env[69328]: value = "task-3273448" [ 865.022063] env[69328]: _type = "Task" [ 865.022063] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.030054] env[69328]: DEBUG oslo_vmware.api [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273448, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.060057] env[69328]: DEBUG oslo_vmware.api [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273446, 'name': RemoveSnapshot_Task, 'duration_secs': 0.586237} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.060346] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Deleted Snapshot of the VM instance {{(pid=69328) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 865.060640] env[69328]: DEBUG nova.compute.manager [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 865.061576] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cc35b8e-953b-4808-8b50-0784c7faa629 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.109192] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f90bad43-d8d9-49f5-9ed7-78c713b3551a tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Acquiring lock "e5a2de79-cfbc-4d9c-8b58-5aa819657978" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 865.110644] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f90bad43-d8d9-49f5-9ed7-78c713b3551a tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Lock "e5a2de79-cfbc-4d9c-8b58-5aa819657978" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 865.110644] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f90bad43-d8d9-49f5-9ed7-78c713b3551a tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Acquiring lock "e5a2de79-cfbc-4d9c-8b58-5aa819657978-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 865.110644] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f90bad43-d8d9-49f5-9ed7-78c713b3551a tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Lock "e5a2de79-cfbc-4d9c-8b58-5aa819657978-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 865.110644] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f90bad43-d8d9-49f5-9ed7-78c713b3551a tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Lock 
"e5a2de79-cfbc-4d9c-8b58-5aa819657978-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 865.112250] env[69328]: INFO nova.compute.manager [None req-f90bad43-d8d9-49f5-9ed7-78c713b3551a tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Terminating instance [ 865.187882] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.562s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 865.187882] env[69328]: DEBUG nova.compute.manager [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 865.188952] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 33.986s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 865.188952] env[69328]: DEBUG nova.objects.instance [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69328) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 865.309173] env[69328]: DEBUG oslo_vmware.api [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Task: {'id': task-3273447, 'name': ReconfigVM_Task, 'duration_secs': 0.648641} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.310980] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Reconfigured VM instance instance-0000003f to attach disk [datastore2] 55d9ba65-e5c8-446a-a209-a840f30ff02c/55d9ba65-e5c8-446a-a209-a840f30ff02c.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 865.312034] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b0c03f9b-1f3f-4346-be7f-d97148d53418 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.320258] env[69328]: DEBUG oslo_vmware.api [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Waiting for the task: (returnval){ [ 865.320258] env[69328]: value = "task-3273449" [ 865.320258] env[69328]: _type = "Task" [ 865.320258] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.334788] env[69328]: DEBUG oslo_vmware.api [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Task: {'id': task-3273449, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.533152] env[69328]: DEBUG oslo_vmware.api [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273448, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076865} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.533152] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 865.533996] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6836b7e5-70ce-4812-878a-f8ce2f455d0b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.558663] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Reconfiguring VM instance instance-00000040 to attach disk [datastore2] a0b663eb-31b0-4de1-94bc-660a7d9c1c7b/a0b663eb-31b0-4de1-94bc-660a7d9c1c7b.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 865.558982] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-40d11f06-96f8-406c-b31f-84814f882c78 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.581142] env[69328]: INFO nova.compute.manager [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Shelve offloading [ 865.585266] env[69328]: DEBUG oslo_vmware.api [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 865.585266] env[69328]: value = "task-3273450" [ 865.585266] env[69328]: _type = "Task" [ 865.585266] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.592783] env[69328]: DEBUG oslo_vmware.api [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273450, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.616949] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f90bad43-d8d9-49f5-9ed7-78c713b3551a tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Acquiring lock "refresh_cache-e5a2de79-cfbc-4d9c-8b58-5aa819657978" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.617147] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f90bad43-d8d9-49f5-9ed7-78c713b3551a tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Acquired lock "refresh_cache-e5a2de79-cfbc-4d9c-8b58-5aa819657978" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 865.617332] env[69328]: DEBUG nova.network.neutron [None req-f90bad43-d8d9-49f5-9ed7-78c713b3551a tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 865.693918] env[69328]: DEBUG nova.compute.utils [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 865.695237] env[69328]: DEBUG nova.compute.manager [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 865.695407] env[69328]: DEBUG nova.network.neutron [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 865.826196] env[69328]: DEBUG nova.policy [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '604c163cb3cc48edabf53f23436f6bea', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b1b18725f7ab4f4dbc5fa03eaaf032e3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 865.833761] env[69328]: DEBUG oslo_vmware.api [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Task: {'id': task-3273449, 'name': Rename_Task, 'duration_secs': 0.327216} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.833925] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 865.834223] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-30889b6f-c85d-47df-aafd-f7e8bb274783 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.842237] env[69328]: DEBUG oslo_vmware.api [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Waiting for the task: (returnval){ [ 865.842237] env[69328]: value = "task-3273451" [ 865.842237] env[69328]: _type = "Task" [ 865.842237] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.851300] env[69328]: DEBUG oslo_vmware.api [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Task: {'id': task-3273451, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.085725] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 866.086120] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7ccd1656-6c73-4237-b94e-75dd147ac6af {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.102891] env[69328]: DEBUG oslo_vmware.api [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273450, 'name': ReconfigVM_Task, 'duration_secs': 0.27626} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.102891] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Reconfigured VM instance instance-00000040 to attach disk [datastore2] a0b663eb-31b0-4de1-94bc-660a7d9c1c7b/a0b663eb-31b0-4de1-94bc-660a7d9c1c7b.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 866.102891] env[69328]: DEBUG oslo_vmware.api [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 866.102891] env[69328]: value = "task-3273452" [ 866.102891] env[69328]: _type = "Task" [ 866.102891] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.102891] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a65c873a-a9c3-4692-8892-c50fbda0277f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.115305] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] VM already powered off {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 866.115539] env[69328]: DEBUG nova.compute.manager [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 866.115893] env[69328]: DEBUG oslo_vmware.api [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 866.115893] env[69328]: value = "task-3273453" [ 866.115893] env[69328]: _type = "Task" [ 866.115893] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.116617] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dbe6e0d-d693-4372-a19c-0d85e56ffb22 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.128650] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "refresh_cache-6ccd0715-0903-4fed-bf80-240f386e4ad8" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.128803] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquired lock "refresh_cache-6ccd0715-0903-4fed-bf80-240f386e4ad8" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 866.129249] env[69328]: DEBUG nova.network.neutron [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 866.133758] env[69328]: DEBUG oslo_vmware.api [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273453, 'name': Rename_Task} progress is 10%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.147533] env[69328]: DEBUG nova.network.neutron [None req-f90bad43-d8d9-49f5-9ed7-78c713b3551a tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 866.199038] env[69328]: DEBUG nova.compute.manager [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 866.207478] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4ea26fc9-46a9-4195-a05a-5a60836d98a4 tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.019s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 866.209673] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.767s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 866.211792] env[69328]: DEBUG nova.objects.instance [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Lazy-loading 'resources' on Instance uuid b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 866.230440] env[69328]: DEBUG nova.network.neutron [None req-f90bad43-d8d9-49f5-9ed7-78c713b3551a tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.354242] env[69328]: DEBUG oslo_vmware.api [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Task: {'id': task-3273451, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.446544] env[69328]: DEBUG nova.network.neutron [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Successfully created port: 0ccfb49b-7358-4b6d-ae9b-bf36dac84b40 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 866.453039] env[69328]: DEBUG nova.compute.manager [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Stashing vm_state: active {{(pid=69328) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 866.632017] env[69328]: DEBUG oslo_vmware.api [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273453, 'name': Rename_Task, 'duration_secs': 0.16902} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.632017] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 866.632479] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-91c56b60-4570-4634-a83f-87ca4df8204e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.640525] env[69328]: DEBUG oslo_vmware.api [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 866.640525] env[69328]: value = "task-3273454" [ 866.640525] env[69328]: _type = "Task" [ 866.640525] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.654703] env[69328]: DEBUG oslo_vmware.api [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273454, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.733313] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f90bad43-d8d9-49f5-9ed7-78c713b3551a tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Releasing lock "refresh_cache-e5a2de79-cfbc-4d9c-8b58-5aa819657978" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 866.734034] env[69328]: DEBUG nova.compute.manager [None req-f90bad43-d8d9-49f5-9ed7-78c713b3551a tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 866.734034] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f90bad43-d8d9-49f5-9ed7-78c713b3551a tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 866.738023] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a32088e-309e-4c5a-ad3e-5daf3e974cbd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.744404] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f90bad43-d8d9-49f5-9ed7-78c713b3551a tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 866.744783] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-893124b2-5615-46a5-8a92-d5b1b6e731f0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.751995] env[69328]: DEBUG oslo_vmware.api [None req-f90bad43-d8d9-49f5-9ed7-78c713b3551a tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Waiting for the task: (returnval){ [ 866.751995] env[69328]: value = "task-3273455" [ 866.751995] env[69328]: _type = "Task" [ 866.751995] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.773459] env[69328]: DEBUG oslo_vmware.api [None req-f90bad43-d8d9-49f5-9ed7-78c713b3551a tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': task-3273455, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.858369] env[69328]: DEBUG oslo_vmware.api [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Task: {'id': task-3273451, 'name': PowerOnVM_Task, 'duration_secs': 0.654347} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.858706] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 866.858845] env[69328]: INFO nova.compute.manager [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Took 9.88 seconds to spawn the instance on the hypervisor. 
[ 866.859031] env[69328]: DEBUG nova.compute.manager [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 866.859881] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3174ec0b-13a4-4a0d-a1dd-b9a977e89f8f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.997295] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 866.998542] env[69328]: DEBUG nova.network.neutron [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Updating instance_info_cache with network_info: [{"id": "79aa6a07-f43a-499b-9989-2017b35d1615", "address": "fa:16:3e:9d:9e:a9", "network": {"id": "620f277d-ca49-41da-83a0-afb8c393e26e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1223326239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdc479a290524130b9d17e627a64b65a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79aa6a07-f4", "ovs_interfaceid": "79aa6a07-f43a-499b-9989-2017b35d1615", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.158926] env[69328]: DEBUG oslo_vmware.api [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273454, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.187397] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "3ba646e8-a5c8-4917-a1c4-32b37affb598" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 867.187820] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "3ba646e8-a5c8-4917-a1c4-32b37affb598" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 867.222982] env[69328]: DEBUG nova.compute.manager [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 867.263079] env[69328]: DEBUG oslo_vmware.api [None req-f90bad43-d8d9-49f5-9ed7-78c713b3551a tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': task-3273455, 'name': PowerOffVM_Task, 'duration_secs': 0.213892} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.263671] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f90bad43-d8d9-49f5-9ed7-78c713b3551a tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 867.264390] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f90bad43-d8d9-49f5-9ed7-78c713b3551a tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 867.268023] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-15a4f33b-bac7-4042-80f1-4fec9bac12a7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.285157] env[69328]: DEBUG nova.virt.hardware [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 867.285377] env[69328]: DEBUG nova.virt.hardware [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 867.285497] env[69328]: DEBUG nova.virt.hardware [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 867.285736] env[69328]: DEBUG nova.virt.hardware [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 867.285817] env[69328]: DEBUG nova.virt.hardware [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 867.285962] env[69328]: DEBUG nova.virt.hardware [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 867.286183] env[69328]: DEBUG nova.virt.hardware [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 867.286337] env[69328]: DEBUG nova.virt.hardware [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 867.286521] env[69328]: DEBUG nova.virt.hardware [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 867.286705] env[69328]: DEBUG nova.virt.hardware [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Possible topologies 
[VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 867.290031] env[69328]: DEBUG nova.virt.hardware [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 867.290031] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7102c538-d4fa-4d8b-a191-936cbbb351a6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.298232] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f90bad43-d8d9-49f5-9ed7-78c713b3551a tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 867.298395] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f90bad43-d8d9-49f5-9ed7-78c713b3551a tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 867.298507] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-f90bad43-d8d9-49f5-9ed7-78c713b3551a tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Deleting the datastore file [datastore2] e5a2de79-cfbc-4d9c-8b58-5aa819657978 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 867.300946] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-91ac176d-0398-4462-b024-c0529489fb74 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.305025] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d961ab7d-1c17-42aa-859d-b43eda43e425 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.326439] env[69328]: DEBUG oslo_vmware.api [None req-f90bad43-d8d9-49f5-9ed7-78c713b3551a tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Waiting for the task: (returnval){ [ 867.326439] env[69328]: value = "task-3273457" [ 867.326439] env[69328]: _type = "Task" [ 867.326439] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.328304] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5c65ac3-4e9e-40ea-9e61-6f55a75b6d28 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.342533] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0132615-0544-46f5-b089-108ec4540763 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.346178] env[69328]: DEBUG oslo_vmware.api [None req-f90bad43-d8d9-49f5-9ed7-78c713b3551a tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': task-3273457, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.380561] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cd383b6-0deb-407d-9211-6498e5184e0f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.385838] env[69328]: INFO nova.compute.manager [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Took 47.77 seconds to build instance. [ 867.391888] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-202d7e66-11e6-423d-8651-3e4ee8e96c57 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.411508] env[69328]: DEBUG nova.compute.provider_tree [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 867.502254] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Releasing lock "refresh_cache-6ccd0715-0903-4fed-bf80-240f386e4ad8" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 867.658316] env[69328]: DEBUG oslo_vmware.api [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273454, 'name': PowerOnVM_Task, 'duration_secs': 0.9178} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.658595] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 867.658801] env[69328]: INFO nova.compute.manager [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Took 8.09 seconds to spawn the instance on the hypervisor. [ 867.658981] env[69328]: DEBUG nova.compute.manager [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 867.659915] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15b1d1d1-91cc-4f6d-a105-b49c81f79291 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.691448] env[69328]: DEBUG nova.compute.manager [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 867.839335] env[69328]: DEBUG oslo_vmware.api [None req-f90bad43-d8d9-49f5-9ed7-78c713b3551a tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Task: {'id': task-3273457, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.187869} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.839526] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-f90bad43-d8d9-49f5-9ed7-78c713b3551a tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 867.840292] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f90bad43-d8d9-49f5-9ed7-78c713b3551a tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 867.840292] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f90bad43-d8d9-49f5-9ed7-78c713b3551a tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 867.840292] env[69328]: INFO nova.compute.manager [None req-f90bad43-d8d9-49f5-9ed7-78c713b3551a tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Took 1.11 seconds to destroy the instance on the hypervisor. 
[ 867.840473] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f90bad43-d8d9-49f5-9ed7-78c713b3551a tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 867.841027] env[69328]: DEBUG nova.compute.manager [-] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 867.841027] env[69328]: DEBUG nova.network.neutron [-] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 867.875870] env[69328]: DEBUG nova.network.neutron [-] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 867.889193] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2e04db62-23f6-4b52-bd20-5b629c2c3cac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Lock "55d9ba65-e5c8-446a-a209-a840f30ff02c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.314s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 867.944873] env[69328]: ERROR nova.scheduler.client.report [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] [req-8acf1698-95a1-403e-97d4-b112070d823d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-8acf1698-95a1-403e-97d4-b112070d823d"}]} [ 867.962018] env[69328]: DEBUG nova.scheduler.client.report [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Refreshing inventories for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 867.988695] env[69328]: DEBUG nova.scheduler.client.report [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Updating ProviderTree inventory for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 867.988931] env[69328]: DEBUG nova.compute.provider_tree [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 868.006244] env[69328]: DEBUG nova.scheduler.client.report [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Refreshing aggregate associations for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e, aggregates: None {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 868.034118] env[69328]: DEBUG nova.scheduler.client.report [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Refreshing trait associations for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 868.182351] env[69328]: INFO nova.compute.manager [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Took 45.82 seconds to build instance. 
[ 868.216733] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 868.325935] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 868.329455] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71e8a048-68b1-4ee2-837d-ba8562c6ae4a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.337889] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 868.340602] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-13832487-7706-453b-950f-a596f3d995e3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.380815] env[69328]: DEBUG nova.network.neutron [-] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.435146] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 868.435380] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 868.435550] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Deleting the datastore file [datastore2] 6ccd0715-0903-4fed-bf80-240f386e4ad8 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 868.435825] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-df7b79a3-c1e5-4275-8276-0328f72dd532 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.445218] env[69328]: DEBUG oslo_vmware.api [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 868.445218] env[69328]: value = 
"task-3273459" [ 868.445218] env[69328]: _type = "Task" [ 868.445218] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.459121] env[69328]: DEBUG oslo_vmware.api [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273459, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.556702] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-837c6ed2-ce23-47aa-85bf-703c68cdacee {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.561941] env[69328]: DEBUG nova.compute.manager [req-a063b57b-6c55-498a-8366-8f5bff1f9276 req-c1bc3802-849b-41e1-9c51-f1f24979128d service nova] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Received event network-vif-unplugged-79aa6a07-f43a-499b-9989-2017b35d1615 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 868.562399] env[69328]: DEBUG oslo_concurrency.lockutils [req-a063b57b-6c55-498a-8366-8f5bff1f9276 req-c1bc3802-849b-41e1-9c51-f1f24979128d service nova] Acquiring lock "6ccd0715-0903-4fed-bf80-240f386e4ad8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 868.562474] env[69328]: DEBUG oslo_concurrency.lockutils [req-a063b57b-6c55-498a-8366-8f5bff1f9276 req-c1bc3802-849b-41e1-9c51-f1f24979128d service nova] Lock "6ccd0715-0903-4fed-bf80-240f386e4ad8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 868.565551] env[69328]: DEBUG oslo_concurrency.lockutils [req-a063b57b-6c55-498a-8366-8f5bff1f9276 req-c1bc3802-849b-41e1-9c51-f1f24979128d service nova] Lock "6ccd0715-0903-4fed-bf80-240f386e4ad8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 868.565551] env[69328]: DEBUG nova.compute.manager [req-a063b57b-6c55-498a-8366-8f5bff1f9276 req-c1bc3802-849b-41e1-9c51-f1f24979128d service nova] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] No waiting events found dispatching network-vif-unplugged-79aa6a07-f43a-499b-9989-2017b35d1615 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 868.565551] env[69328]: WARNING nova.compute.manager [req-a063b57b-6c55-498a-8366-8f5bff1f9276 req-c1bc3802-849b-41e1-9c51-f1f24979128d service nova] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Received unexpected event network-vif-unplugged-79aa6a07-f43a-499b-9989-2017b35d1615 for instance with vm_state shelved and task_state shelving_offloading. 
[ 868.569562] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61c9f482-a171-40fc-ab82-d59628f5785a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.609570] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65f6b906-5b69-4a52-8982-a62408a1060c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.617816] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce1144bc-7ac5-435f-b102-b7cabde30670 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.632550] env[69328]: DEBUG nova.compute.provider_tree [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 868.684652] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9c24d043-b955-445f-957e-cdbf046ddeb2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "a0b663eb-31b0-4de1-94bc-660a7d9c1c7b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.334s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 868.706850] env[69328]: DEBUG nova.network.neutron [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Successfully updated port: 0ccfb49b-7358-4b6d-ae9b-bf36dac84b40 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 868.722195] env[69328]: DEBUG nova.compute.manager [req-6fa823fe-99a1-4a16-a8d8-302c3a523d03 req-3b2f5423-42ca-4c76-8a90-58350454c338 service nova] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Received event network-vif-plugged-0ccfb49b-7358-4b6d-ae9b-bf36dac84b40 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 868.722486] env[69328]: DEBUG oslo_concurrency.lockutils [req-6fa823fe-99a1-4a16-a8d8-302c3a523d03 req-3b2f5423-42ca-4c76-8a90-58350454c338 service nova] Acquiring lock "7232ad5c-9f4e-425e-824a-4c3750f665eb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 868.722664] env[69328]: DEBUG oslo_concurrency.lockutils [req-6fa823fe-99a1-4a16-a8d8-302c3a523d03 req-3b2f5423-42ca-4c76-8a90-58350454c338 service nova] Lock "7232ad5c-9f4e-425e-824a-4c3750f665eb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 868.722836] env[69328]: DEBUG oslo_concurrency.lockutils [req-6fa823fe-99a1-4a16-a8d8-302c3a523d03 req-3b2f5423-42ca-4c76-8a90-58350454c338 service nova] Lock "7232ad5c-9f4e-425e-824a-4c3750f665eb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 868.723012] env[69328]: DEBUG nova.compute.manager [req-6fa823fe-99a1-4a16-a8d8-302c3a523d03 req-3b2f5423-42ca-4c76-8a90-58350454c338 service nova] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] No waiting events found dispatching network-vif-plugged-0ccfb49b-7358-4b6d-ae9b-bf36dac84b40 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 868.723706] env[69328]: WARNING nova.compute.manager [req-6fa823fe-99a1-4a16-a8d8-302c3a523d03 req-3b2f5423-42ca-4c76-8a90-58350454c338 service nova] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Received unexpected event network-vif-plugged-0ccfb49b-7358-4b6d-ae9b-bf36dac84b40 for instance with vm_state building and task_state spawning. [ 868.885644] env[69328]: INFO nova.compute.manager [-] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Took 1.04 seconds to deallocate network for instance. [ 868.962874] env[69328]: DEBUG oslo_vmware.api [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273459, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.3031} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.962874] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 868.962874] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 868.962874] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 869.030149] env[69328]: INFO nova.scheduler.client.report [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Deleted allocations for instance 6ccd0715-0903-4fed-bf80-240f386e4ad8 [ 869.138318] env[69328]: DEBUG nova.scheduler.client.report [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 869.212673] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ae365e40-0c05-431b-80ba-b66f638c662f 
tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Acquiring lock "refresh_cache-7232ad5c-9f4e-425e-824a-4c3750f665eb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.212836] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Acquired lock "refresh_cache-7232ad5c-9f4e-425e-824a-4c3750f665eb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 869.212989] env[69328]: DEBUG nova.network.neutron [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 869.392532] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f90bad43-d8d9-49f5-9ed7-78c713b3551a tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 869.536973] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 869.644236] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.435s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 869.646862] env[69328]: DEBUG oslo_concurrency.lockutils [None req-970b7cc7-0a5b-4e6c-8853-38e9d5f3f5be tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.499s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 869.647056] env[69328]: DEBUG nova.objects.instance [None req-970b7cc7-0a5b-4e6c-8853-38e9d5f3f5be tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Lazy-loading 'resources' on Instance uuid 15a8de08-4d20-4329-9867-53e5dff82878 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 869.714449] env[69328]: INFO nova.scheduler.client.report [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Deleted allocations for instance b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b [ 869.785295] env[69328]: DEBUG nova.network.neutron [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 
tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 870.033411] env[69328]: DEBUG nova.network.neutron [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Updating instance_info_cache with network_info: [{"id": "0ccfb49b-7358-4b6d-ae9b-bf36dac84b40", "address": "fa:16:3e:f6:ea:30", "network": {"id": "9b74b7de-dc0f-454c-ae53-f627b1accd22", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1092009488-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b1b18725f7ab4f4dbc5fa03eaaf032e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "360308f4-9d0a-4ec2-8bcf-44891f452847", "external-id": "nsx-vlan-transportzone-383", "segmentation_id": 383, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ccfb49b-73", "ovs_interfaceid": "0ccfb49b-7358-4b6d-ae9b-bf36dac84b40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.233413] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9c8fce6a-5227-4ccd-aaeb-edeccfb2e06e tempest-ServerShowV254Test-715300155 tempest-ServerShowV254Test-715300155-project-member] Lock "b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.119s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 870.523927] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d47df65a-9288-4640-a3af-9b59716c8f78 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.532782] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84b13abf-340e-4ae7-9554-b417f71565ea {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.537714] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Releasing lock "refresh_cache-7232ad5c-9f4e-425e-824a-4c3750f665eb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 870.537714] env[69328]: DEBUG nova.compute.manager [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Instance network_info: |[{"id": 
"0ccfb49b-7358-4b6d-ae9b-bf36dac84b40", "address": "fa:16:3e:f6:ea:30", "network": {"id": "9b74b7de-dc0f-454c-ae53-f627b1accd22", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1092009488-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b1b18725f7ab4f4dbc5fa03eaaf032e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "360308f4-9d0a-4ec2-8bcf-44891f452847", "external-id": "nsx-vlan-transportzone-383", "segmentation_id": 383, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ccfb49b-73", "ovs_interfaceid": "0ccfb49b-7358-4b6d-ae9b-bf36dac84b40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 870.537714] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f6:ea:30', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '360308f4-9d0a-4ec2-8bcf-44891f452847', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0ccfb49b-7358-4b6d-ae9b-bf36dac84b40', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 870.545223] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Creating folder: Project (b1b18725f7ab4f4dbc5fa03eaaf032e3). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 870.546068] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0e710886-3b21-4c78-bc13-11da4ac27e05 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.578277] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-801a0b04-efbb-4d34-9696-a2555212042d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.582600] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Created folder: Project (b1b18725f7ab4f4dbc5fa03eaaf032e3) in parent group-v653649. [ 870.582600] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Creating folder: Instances. Parent ref: group-v653839. 
{{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 870.582600] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-09f3f9e9-d236-4b3c-ac90-5e3547c419a6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.588646] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aa241a7-c54f-4106-90d5-70006cbffe2d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.593883] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Created folder: Instances in parent group-v653839. [ 870.594144] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 870.595481] env[69328]: DEBUG nova.compute.manager [req-2db52669-d138-48dd-8f89-9922f93c0311 req-0191eca6-a880-4c44-8700-ed751eb3e4d6 service nova] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Received event network-changed-79aa6a07-f43a-499b-9989-2017b35d1615 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 870.595481] env[69328]: DEBUG nova.compute.manager [req-2db52669-d138-48dd-8f89-9922f93c0311 req-0191eca6-a880-4c44-8700-ed751eb3e4d6 service nova] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Refreshing instance network info cache due to event network-changed-79aa6a07-f43a-499b-9989-2017b35d1615. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 870.595627] env[69328]: DEBUG oslo_concurrency.lockutils [req-2db52669-d138-48dd-8f89-9922f93c0311 req-0191eca6-a880-4c44-8700-ed751eb3e4d6 service nova] Acquiring lock "refresh_cache-6ccd0715-0903-4fed-bf80-240f386e4ad8" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.595767] env[69328]: DEBUG oslo_concurrency.lockutils [req-2db52669-d138-48dd-8f89-9922f93c0311 req-0191eca6-a880-4c44-8700-ed751eb3e4d6 service nova] Acquired lock "refresh_cache-6ccd0715-0903-4fed-bf80-240f386e4ad8" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 870.595969] env[69328]: DEBUG nova.network.neutron [req-2db52669-d138-48dd-8f89-9922f93c0311 req-0191eca6-a880-4c44-8700-ed751eb3e4d6 service nova] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Refreshing network info cache for port 79aa6a07-f43a-499b-9989-2017b35d1615 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 870.597721] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 870.598398] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4f1f5015-32aa-4954-b21d-ce7700f6e140 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.622317] env[69328]: DEBUG nova.compute.provider_tree [None req-970b7cc7-0a5b-4e6c-8853-38e9d5f3f5be tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 870.628660] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 870.628660] env[69328]: value = "task-3273462" [ 870.628660] env[69328]: _type = "Task" [ 870.628660] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.640097] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273462, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.717065] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "20f750d7-1914-49bb-802f-464a30ffcf3a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 870.717396] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "20f750d7-1914-49bb-802f-464a30ffcf3a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 870.889765] env[69328]: DEBUG nova.compute.manager [req-6b81b603-425f-497d-89cd-5be1eb8312ad req-054c8f0d-1446-4c14-856d-2a8bfafad052 service nova] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Received event network-changed-0ccfb49b-7358-4b6d-ae9b-bf36dac84b40 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 870.889956] env[69328]: DEBUG nova.compute.manager [req-6b81b603-425f-497d-89cd-5be1eb8312ad req-054c8f0d-1446-4c14-856d-2a8bfafad052 service nova] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Refreshing instance network info cache due to event network-changed-0ccfb49b-7358-4b6d-ae9b-bf36dac84b40. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 870.890198] env[69328]: DEBUG oslo_concurrency.lockutils [req-6b81b603-425f-497d-89cd-5be1eb8312ad req-054c8f0d-1446-4c14-856d-2a8bfafad052 service nova] Acquiring lock "refresh_cache-7232ad5c-9f4e-425e-824a-4c3750f665eb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.890340] env[69328]: DEBUG oslo_concurrency.lockutils [req-6b81b603-425f-497d-89cd-5be1eb8312ad req-054c8f0d-1446-4c14-856d-2a8bfafad052 service nova] Acquired lock "refresh_cache-7232ad5c-9f4e-425e-824a-4c3750f665eb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 870.890496] env[69328]: DEBUG nova.network.neutron [req-6b81b603-425f-497d-89cd-5be1eb8312ad req-054c8f0d-1446-4c14-856d-2a8bfafad052 service nova] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Refreshing network info cache for port 0ccfb49b-7358-4b6d-ae9b-bf36dac84b40 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 871.126319] env[69328]: DEBUG nova.scheduler.client.report [None req-970b7cc7-0a5b-4e6c-8853-38e9d5f3f5be tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 871.139498] env[69328]: DEBUG 
oslo_vmware.api [-] Task: {'id': task-3273462, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.144839] env[69328]: DEBUG oslo_concurrency.lockutils [None req-887c48b4-8f6f-4201-aeae-a5cb8814a584 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "6ccd0715-0903-4fed-bf80-240f386e4ad8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 871.219674] env[69328]: DEBUG nova.compute.manager [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 871.354130] env[69328]: DEBUG nova.network.neutron [req-2db52669-d138-48dd-8f89-9922f93c0311 req-0191eca6-a880-4c44-8700-ed751eb3e4d6 service nova] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Updated VIF entry in instance network info cache for port 79aa6a07-f43a-499b-9989-2017b35d1615. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 871.354130] env[69328]: DEBUG nova.network.neutron [req-2db52669-d138-48dd-8f89-9922f93c0311 req-0191eca6-a880-4c44-8700-ed751eb3e4d6 service nova] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Updating instance_info_cache with network_info: [{"id": "79aa6a07-f43a-499b-9989-2017b35d1615", "address": "fa:16:3e:9d:9e:a9", "network": {"id": "620f277d-ca49-41da-83a0-afb8c393e26e", "bridge": null, "label": "tempest-DeleteServersTestJSON-1223326239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdc479a290524130b9d17e627a64b65a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap79aa6a07-f4", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.627795] env[69328]: DEBUG nova.network.neutron [req-6b81b603-425f-497d-89cd-5be1eb8312ad req-054c8f0d-1446-4c14-856d-2a8bfafad052 service nova] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Updated VIF entry in instance network info cache for port 0ccfb49b-7358-4b6d-ae9b-bf36dac84b40. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 871.628219] env[69328]: DEBUG nova.network.neutron [req-6b81b603-425f-497d-89cd-5be1eb8312ad req-054c8f0d-1446-4c14-856d-2a8bfafad052 service nova] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Updating instance_info_cache with network_info: [{"id": "0ccfb49b-7358-4b6d-ae9b-bf36dac84b40", "address": "fa:16:3e:f6:ea:30", "network": {"id": "9b74b7de-dc0f-454c-ae53-f627b1accd22", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1092009488-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b1b18725f7ab4f4dbc5fa03eaaf032e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "360308f4-9d0a-4ec2-8bcf-44891f452847", "external-id": "nsx-vlan-transportzone-383", "segmentation_id": 383, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ccfb49b-73", "ovs_interfaceid": "0ccfb49b-7358-4b6d-ae9b-bf36dac84b40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.635149] env[69328]: DEBUG oslo_concurrency.lockutils [None req-970b7cc7-0a5b-4e6c-8853-38e9d5f3f5be tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.988s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 871.640440] env[69328]: DEBUG oslo_concurrency.lockutils [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 26.762s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 871.641836] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273462, 'name': CreateVM_Task, 'duration_secs': 0.550242} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.642479] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 871.643105] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.643286] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 871.643591] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 871.644519] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8146adf-9e9d-4f3f-8d45-ffd71601b2c9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.654256] env[69328]: DEBUG oslo_vmware.api [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Waiting for the task: (returnval){ [ 871.654256] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e6b5b9-35ed-3078-0a0e-15aca6303310" [ 871.654256] env[69328]: _type = "Task" [ 871.654256] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.662987] env[69328]: DEBUG oslo_vmware.api [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e6b5b9-35ed-3078-0a0e-15aca6303310, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.671992] env[69328]: INFO nova.scheduler.client.report [None req-970b7cc7-0a5b-4e6c-8853-38e9d5f3f5be tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Deleted allocations for instance 15a8de08-4d20-4329-9867-53e5dff82878 [ 871.740179] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 871.856779] env[69328]: DEBUG oslo_concurrency.lockutils [req-2db52669-d138-48dd-8f89-9922f93c0311 req-0191eca6-a880-4c44-8700-ed751eb3e4d6 service nova] Releasing lock "refresh_cache-6ccd0715-0903-4fed-bf80-240f386e4ad8" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 872.131299] env[69328]: DEBUG oslo_concurrency.lockutils [req-6b81b603-425f-497d-89cd-5be1eb8312ad req-054c8f0d-1446-4c14-856d-2a8bfafad052 service nova] Releasing lock "refresh_cache-7232ad5c-9f4e-425e-824a-4c3750f665eb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 872.131616] env[69328]: DEBUG nova.compute.manager [req-6b81b603-425f-497d-89cd-5be1eb8312ad req-054c8f0d-1446-4c14-856d-2a8bfafad052 service nova] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Received event network-changed-7748ad51-059a-4dd5-b929-13b3fbac9d5c {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 872.131795] env[69328]: DEBUG nova.compute.manager [req-6b81b603-425f-497d-89cd-5be1eb8312ad req-054c8f0d-1446-4c14-856d-2a8bfafad052 service nova] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Refreshing instance network info cache due to event network-changed-7748ad51-059a-4dd5-b929-13b3fbac9d5c. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 872.132023] env[69328]: DEBUG oslo_concurrency.lockutils [req-6b81b603-425f-497d-89cd-5be1eb8312ad req-054c8f0d-1446-4c14-856d-2a8bfafad052 service nova] Acquiring lock "refresh_cache-55d9ba65-e5c8-446a-a209-a840f30ff02c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.132172] env[69328]: DEBUG oslo_concurrency.lockutils [req-6b81b603-425f-497d-89cd-5be1eb8312ad req-054c8f0d-1446-4c14-856d-2a8bfafad052 service nova] Acquired lock "refresh_cache-55d9ba65-e5c8-446a-a209-a840f30ff02c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 872.132343] env[69328]: DEBUG nova.network.neutron [req-6b81b603-425f-497d-89cd-5be1eb8312ad req-054c8f0d-1446-4c14-856d-2a8bfafad052 service nova] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Refreshing network info cache for port 7748ad51-059a-4dd5-b929-13b3fbac9d5c {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 872.146657] env[69328]: DEBUG nova.objects.instance [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Lazy-loading 'migration_context' on Instance uuid 25fb207b-9388-4198-bb48-ab7cebd43375 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 872.164519] env[69328]: DEBUG oslo_vmware.api [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e6b5b9-35ed-3078-0a0e-15aca6303310, 'name': SearchDatastore_Task, 'duration_secs': 0.015368} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.164796] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 872.165025] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 872.165268] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.165411] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 872.165586] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 872.166045] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f9b79114-d698-406a-b12d-a662337bbb01 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.177778] env[69328]: DEBUG oslo_concurrency.lockutils [None req-970b7cc7-0a5b-4e6c-8853-38e9d5f3f5be tempest-ServerRescueTestJSON-652790161 tempest-ServerRescueTestJSON-652790161-project-member] Lock "15a8de08-4d20-4329-9867-53e5dff82878" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.775s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 872.180710] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 872.180885] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Folder 
[datastore2] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 872.181602] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2217693-6e0d-4d90-b299-da4d7b2dbf9e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.186627] env[69328]: DEBUG oslo_vmware.api [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Waiting for the task: (returnval){ [ 872.186627] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5268edfe-cd23-2a3e-6d81-7637c89d5926" [ 872.186627] env[69328]: _type = "Task" [ 872.186627] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.194084] env[69328]: DEBUG oslo_vmware.api [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5268edfe-cd23-2a3e-6d81-7637c89d5926, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.698961] env[69328]: DEBUG oslo_vmware.api [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5268edfe-cd23-2a3e-6d81-7637c89d5926, 'name': SearchDatastore_Task, 'duration_secs': 0.023895} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.703317] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f5fa444-05c9-44a6-8872-d7411baee1da {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.711715] env[69328]: DEBUG oslo_vmware.api [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Waiting for the task: (returnval){ [ 872.711715] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]521f51cf-0a26-4470-cdcc-0fa579747e22" [ 872.711715] env[69328]: _type = "Task" [ 872.711715] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.721962] env[69328]: DEBUG oslo_vmware.api [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]521f51cf-0a26-4470-cdcc-0fa579747e22, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.864561] env[69328]: DEBUG nova.network.neutron [req-6b81b603-425f-497d-89cd-5be1eb8312ad req-054c8f0d-1446-4c14-856d-2a8bfafad052 service nova] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Updated VIF entry in instance network info cache for port 7748ad51-059a-4dd5-b929-13b3fbac9d5c. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 872.864928] env[69328]: DEBUG nova.network.neutron [req-6b81b603-425f-497d-89cd-5be1eb8312ad req-054c8f0d-1446-4c14-856d-2a8bfafad052 service nova] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Updating instance_info_cache with network_info: [{"id": "7748ad51-059a-4dd5-b929-13b3fbac9d5c", "address": "fa:16:3e:19:7f:27", "network": {"id": "3ab35a35-3138-447c-be5b-076dd28b923b", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-953121151-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.152", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ff1b8afdf994141b20b41c0a4088101", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7748ad51-05", "ovs_interfaceid": "7748ad51-059a-4dd5-b929-13b3fbac9d5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.975432] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09e55169-e6b7-4725-ba35-02fdcf786054 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.983780] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73e05f93-4c37-4c04-9acb-769aa289dd4c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.015580] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e626bead-bd11-4cad-8bcf-2fb85934992e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.023957] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3762b07-87a2-4aab-8f1b-0aeb43253833 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.765550] env[69328]: DEBUG oslo_concurrency.lockutils [req-6b81b603-425f-497d-89cd-5be1eb8312ad req-054c8f0d-1446-4c14-856d-2a8bfafad052 service nova] Releasing lock "refresh_cache-55d9ba65-e5c8-446a-a209-a840f30ff02c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 873.776135] env[69328]: DEBUG oslo_vmware.api [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]521f51cf-0a26-4470-cdcc-0fa579747e22, 'name': SearchDatastore_Task, 'duration_secs': 0.013168} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.783661] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 873.783969] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 7232ad5c-9f4e-425e-824a-4c3750f665eb/7232ad5c-9f4e-425e-824a-4c3750f665eb.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 873.784456] env[69328]: DEBUG nova.compute.provider_tree [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 873.785741] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f55be386-ae79-4bf4-81d8-742358e4ebcf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.792274] env[69328]: DEBUG oslo_vmware.api [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Waiting for the task: (returnval){ [ 873.792274] env[69328]: value = "task-3273463" [ 873.792274] env[69328]: _type = "Task" [ 873.792274] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.800280] env[69328]: DEBUG oslo_vmware.api [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Task: {'id': task-3273463, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.288998] env[69328]: DEBUG nova.scheduler.client.report [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 874.302636] env[69328]: DEBUG oslo_vmware.api [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Task: {'id': task-3273463, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.478799} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.302735] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 7232ad5c-9f4e-425e-824a-4c3750f665eb/7232ad5c-9f4e-425e-824a-4c3750f665eb.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 874.302898] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 874.303175] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-23e8f129-9642-48bf-90e2-49146aa962c8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.310011] env[69328]: DEBUG oslo_vmware.api [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Waiting for the task: (returnval){ [ 874.310011] env[69328]: value = "task-3273464" [ 874.310011] env[69328]: _type = "Task" [ 874.310011] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.321340] env[69328]: DEBUG oslo_vmware.api [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Task: {'id': task-3273464, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.823787] env[69328]: DEBUG oslo_vmware.api [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Task: {'id': task-3273464, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06937} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.824945] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 874.825764] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dce69cf-ee44-48b3-9252-19377d04e368 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.849064] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Reconfiguring VM instance instance-00000041 to attach disk [datastore2] 7232ad5c-9f4e-425e-824a-4c3750f665eb/7232ad5c-9f4e-425e-824a-4c3750f665eb.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 874.849672] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e9792764-ca2d-4a6c-9080-202de3e6702c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.869543] env[69328]: DEBUG oslo_vmware.api [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Waiting for the task: (returnval){ [ 874.869543] env[69328]: value = "task-3273465" [ 874.869543] env[69328]: _type = "Task" [ 874.869543] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.877228] env[69328]: DEBUG oslo_vmware.api [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Task: {'id': task-3273465, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.305015] env[69328]: DEBUG oslo_concurrency.lockutils [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.664s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 875.314816] env[69328]: DEBUG oslo_concurrency.lockutils [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.317s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 875.317845] env[69328]: INFO nova.compute.claims [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 875.384074] env[69328]: DEBUG oslo_vmware.api [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Task: {'id': task-3273465, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.883800] env[69328]: DEBUG oslo_vmware.api [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Task: {'id': task-3273465, 'name': ReconfigVM_Task, 'duration_secs': 0.819514} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.883800] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Reconfigured VM instance instance-00000041 to attach disk [datastore2] 7232ad5c-9f4e-425e-824a-4c3750f665eb/7232ad5c-9f4e-425e-824a-4c3750f665eb.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 875.883800] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c4c5cfd7-019f-4c08-93cd-d5d17eff43a0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.889352] env[69328]: DEBUG oslo_vmware.api [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Waiting for the task: (returnval){ [ 875.889352] env[69328]: value = "task-3273466" [ 875.889352] env[69328]: _type = "Task" [ 875.889352] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.902822] env[69328]: DEBUG oslo_vmware.api [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Task: {'id': task-3273466, 'name': Rename_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.400861] env[69328]: DEBUG oslo_vmware.api [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Task: {'id': task-3273466, 'name': Rename_Task, 'duration_secs': 0.157991} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.401163] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 876.403570] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3503b5d3-3f96-4152-af0b-a903e235fada {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.410954] env[69328]: DEBUG oslo_vmware.api [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Waiting for the task: (returnval){ [ 876.410954] env[69328]: value = "task-3273467" [ 876.410954] env[69328]: _type = "Task" [ 876.410954] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.421658] env[69328]: DEBUG oslo_vmware.api [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Task: {'id': task-3273467, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.726691] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62360802-3b52-4ec8-ba61-4808e20fca9f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.734625] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3cf9a5d-1602-41df-831e-d98e3de9777f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.765505] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-288ebaa0-79bc-444f-9313-bccbf5ed3ae1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.772883] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26c61eb8-d082-4ac8-ad2e-59d137e737d0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.786167] env[69328]: DEBUG nova.compute.provider_tree [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 876.861844] env[69328]: INFO nova.compute.manager [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Swapping old allocation on dict_keys(['149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e']) held by migration b5ff88a7-0c06-459d-8382-fae134bf7dff for instance [ 876.885107] env[69328]: DEBUG nova.scheduler.client.report [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Overwriting current allocation {'allocations': {'149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 94}}, 'project_id': 'f2aed2695f2d437fbe9202124d2ed95b', 'user_id': '91ee0807be574796bec53919ecd5a934', 'consumer_generation': 1} on consumer 25fb207b-9388-4198-bb48-ab7cebd43375 {{(pid=69328) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 876.921859] env[69328]: DEBUG oslo_vmware.api [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Task: {'id': task-3273467, 'name': PowerOnVM_Task, 'duration_secs': 0.499977} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.923206] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 876.923206] env[69328]: INFO nova.compute.manager [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Took 9.70 seconds to spawn the instance on the hypervisor. [ 876.923466] env[69328]: DEBUG nova.compute.manager [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 876.924520] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bedd4f64-3963-4e59-b6d7-2780201cb504 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.990747] env[69328]: DEBUG oslo_concurrency.lockutils [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquiring lock "refresh_cache-25fb207b-9388-4198-bb48-ab7cebd43375" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 876.990923] env[69328]: DEBUG oslo_concurrency.lockutils [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquired lock "refresh_cache-25fb207b-9388-4198-bb48-ab7cebd43375" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 876.991108] env[69328]: DEBUG nova.network.neutron [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 877.308068] env[69328]: ERROR nova.scheduler.client.report [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [req-c7c93363-4130-4655-b131-705ca3ed4b7d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c7c93363-4130-4655-b131-705ca3ed4b7d"}]} [ 877.323732] env[69328]: DEBUG nova.scheduler.client.report [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Refreshing inventories for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 877.337412] env[69328]: DEBUG nova.scheduler.client.report [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Updating ProviderTree inventory for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 877.337412] env[69328]: DEBUG nova.compute.provider_tree [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 877.349025] env[69328]: DEBUG nova.scheduler.client.report [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Refreshing aggregate associations for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e, aggregates: None {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 877.372397] env[69328]: DEBUG nova.scheduler.client.report [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Refreshing trait associations for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 877.446427] env[69328]: INFO nova.compute.manager [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Took 49.09 seconds to build instance. 
[ 877.489626] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquiring lock "c751ef77-c3be-46cd-b7eb-fe139bf0998b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 877.489716] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lock "c751ef77-c3be-46cd-b7eb-fe139bf0998b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 877.699233] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eba2a2a-79e7-440c-9dc1-8dd0b668c5cb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.706878] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74b0b26d-e60b-43f4-9610-b4e7bc5310ea {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.741204] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4503f447-7774-4ca4-8906-4dd946c7f981 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.748604] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-686fe3f4-76a7-465e-b006-a4ec9af54981 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.761893] env[69328]: DEBUG nova.compute.provider_tree [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 877.802851] env[69328]: DEBUG nova.network.neutron [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Updating instance_info_cache with network_info: [{"id": "32db9785-1822-4acf-9971-06db92f35c18", "address": "fa:16:3e:3e:aa:b8", "network": {"id": "4707aea3-ce46-4fe0-bf5c-141ee5596a86", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.119", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "ecd184c6b78b4e4297fb93abb94aa37d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32db9785-18", "ovs_interfaceid": "32db9785-1822-4acf-9971-06db92f35c18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 877.948652] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ae365e40-0c05-431b-80ba-b66f638c662f tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Lock "7232ad5c-9f4e-425e-824a-4c3750f665eb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.743s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 877.995218] env[69328]: DEBUG nova.compute.manager [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 878.154560] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquiring lock "1413dcfe-3570-4657-b811-81a1acc159d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 878.154802] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lock "1413dcfe-3570-4657-b811-81a1acc159d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 878.289544] env[69328]: ERROR nova.scheduler.client.report [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [req-ba562f9d-db9e-4403-8cc1-7e1996cd2927] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ba562f9d-db9e-4403-8cc1-7e1996cd2927"}]} [ 878.306459] env[69328]: DEBUG nova.scheduler.client.report [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Refreshing inventories for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 878.308529] env[69328]: DEBUG oslo_concurrency.lockutils [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Releasing lock "refresh_cache-25fb207b-9388-4198-bb48-ab7cebd43375" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 878.308945] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 878.309755] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9bfd303d-031f-473e-a30e-fe297a068c92 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.317284] env[69328]: DEBUG oslo_vmware.api [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Waiting for the task: (returnval){ [ 878.317284] env[69328]: value = "task-3273468" [ 878.317284] env[69328]: _type = "Task" [ 878.317284] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.326304] env[69328]: DEBUG oslo_vmware.api [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273468, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.327186] env[69328]: DEBUG nova.scheduler.client.report [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Updating ProviderTree inventory for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 878.327385] env[69328]: DEBUG nova.compute.provider_tree [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 878.342034] env[69328]: DEBUG nova.scheduler.client.report [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Refreshing aggregate associations for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e, aggregates: None {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 878.361237] env[69328]: DEBUG nova.scheduler.client.report [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Refreshing trait associations for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 878.515127] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 878.585729] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e9757ca2-942a-4ea5-b6e2-d822199969ea tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Acquiring lock "7232ad5c-9f4e-425e-824a-4c3750f665eb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 878.585952] env[69328]: DEBUG oslo_concurrency.lockutils [None 
req-e9757ca2-942a-4ea5-b6e2-d822199969ea tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Lock "7232ad5c-9f4e-425e-824a-4c3750f665eb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 878.586177] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e9757ca2-942a-4ea5-b6e2-d822199969ea tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Acquiring lock "7232ad5c-9f4e-425e-824a-4c3750f665eb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 878.586367] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e9757ca2-942a-4ea5-b6e2-d822199969ea tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Lock "7232ad5c-9f4e-425e-824a-4c3750f665eb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 878.586562] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e9757ca2-942a-4ea5-b6e2-d822199969ea tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Lock "7232ad5c-9f4e-425e-824a-4c3750f665eb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 878.592221] env[69328]: INFO nova.compute.manager [None req-e9757ca2-942a-4ea5-b6e2-d822199969ea tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Terminating instance [ 878.656967] env[69328]: DEBUG nova.compute.manager [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Starting instance... 
{{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 878.681840] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a96d59f-f8ab-4d48-b766-eaef57257bba {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.689290] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92d1aefc-98b7-4757-acb6-deb054be6182 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.720641] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6e34095-114a-46ad-b6a3-4b0bb029939a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.728225] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cf0f2b1-67f0-492a-ab1f-befce67c91af {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.742239] env[69328]: DEBUG nova.compute.provider_tree [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 878.826855] env[69328]: DEBUG oslo_vmware.api [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273468, 'name': PowerOffVM_Task, 'duration_secs': 0.188537} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.827074] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 878.827756] env[69328]: DEBUG nova.virt.hardware [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:37:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='32e26fb7-ec83-4d85-ade8-a07c889bcc21',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-718846255',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 878.827970] env[69328]: DEBUG nova.virt.hardware [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 878.828140] env[69328]: DEBUG nova.virt.hardware [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 878.828329] env[69328]: DEBUG nova.virt.hardware [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 878.828475] env[69328]: DEBUG nova.virt.hardware [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 878.828621] env[69328]: DEBUG nova.virt.hardware [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 878.828839] env[69328]: DEBUG nova.virt.hardware [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 878.829062] env[69328]: DEBUG nova.virt.hardware [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 
tempest-MigrationsAdminTest-499172505-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 878.829177] env[69328]: DEBUG nova.virt.hardware [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 878.829340] env[69328]: DEBUG nova.virt.hardware [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 878.829511] env[69328]: DEBUG nova.virt.hardware [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 878.834456] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3cee0a54-d815-43c3-9118-efec3e2e3eee {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.852668] env[69328]: DEBUG oslo_vmware.api [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Waiting for the task: (returnval){ [ 878.852668] env[69328]: value = "task-3273469" [ 878.852668] env[69328]: _type = "Task" [ 878.852668] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.861266] env[69328]: DEBUG oslo_vmware.api [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273469, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.097815] env[69328]: DEBUG nova.compute.manager [None req-e9757ca2-942a-4ea5-b6e2-d822199969ea tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 879.098156] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e9757ca2-942a-4ea5-b6e2-d822199969ea tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 879.098977] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaa0f5b4-b0f3-4b48-9796-4ac4992730ca {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.106672] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9757ca2-942a-4ea5-b6e2-d822199969ea tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 879.106915] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-de4c4bd0-aab5-4b08-aaa6-393ad0d2b857 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.112384] env[69328]: DEBUG oslo_vmware.api [None req-e9757ca2-942a-4ea5-b6e2-d822199969ea tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Waiting for the task: (returnval){ [ 879.112384] env[69328]: value = "task-3273470" [ 879.112384] env[69328]: _type = "Task" [ 879.112384] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.121053] env[69328]: DEBUG oslo_vmware.api [None req-e9757ca2-942a-4ea5-b6e2-d822199969ea tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Task: {'id': task-3273470, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.178663] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 879.282598] env[69328]: DEBUG nova.scheduler.client.report [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Updated inventory for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with generation 97 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 879.282876] env[69328]: DEBUG nova.compute.provider_tree [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Updating resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e generation from 97 to 98 during operation: update_inventory {{(pid=69328) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 879.283066] env[69328]: DEBUG nova.compute.provider_tree [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 879.362890] env[69328]: DEBUG oslo_vmware.api [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273469, 'name': ReconfigVM_Task, 'duration_secs': 0.163257} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.363748] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a09961e2-73e6-4cac-9d05-5b18ca0ac7b9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.381249] env[69328]: DEBUG nova.virt.hardware [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:37:44Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='32e26fb7-ec83-4d85-ade8-a07c889bcc21',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-718846255',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 879.381536] env[69328]: DEBUG nova.virt.hardware [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 879.381700] env[69328]: DEBUG nova.virt.hardware [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 879.382104] env[69328]: DEBUG nova.virt.hardware [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 879.382104] env[69328]: DEBUG nova.virt.hardware [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 879.382170] env[69328]: DEBUG nova.virt.hardware [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 879.382362] env[69328]: DEBUG nova.virt.hardware [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 879.382525] env[69328]: DEBUG nova.virt.hardware [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 879.382690] env[69328]: DEBUG nova.virt.hardware [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 879.382848] env[69328]: DEBUG nova.virt.hardware [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 879.383058] env[69328]: DEBUG nova.virt.hardware [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 879.383809] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9229ada-6d69-40f9-ad84-16ade118a9b1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.389850] env[69328]: DEBUG oslo_vmware.api [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Waiting for the task: (returnval){ [ 879.389850] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a487d1-b92f-45ce-a8d3-70b5e6546710" [ 879.389850] env[69328]: _type = "Task" [ 879.389850] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.397655] env[69328]: DEBUG oslo_vmware.api [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a487d1-b92f-45ce-a8d3-70b5e6546710, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.623090] env[69328]: DEBUG oslo_vmware.api [None req-e9757ca2-942a-4ea5-b6e2-d822199969ea tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Task: {'id': task-3273470, 'name': PowerOffVM_Task, 'duration_secs': 0.182664} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.623090] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9757ca2-942a-4ea5-b6e2-d822199969ea tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 879.623090] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e9757ca2-942a-4ea5-b6e2-d822199969ea tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 879.623306] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f14b0c57-d1ef-43ef-9185-505d72315673 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.692359] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e9757ca2-942a-4ea5-b6e2-d822199969ea tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 879.692736] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e9757ca2-942a-4ea5-b6e2-d822199969ea tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 879.693043] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9757ca2-942a-4ea5-b6e2-d822199969ea tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Deleting the datastore file [datastore2] 7232ad5c-9f4e-425e-824a-4c3750f665eb {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 879.693423] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bf8fd250-06b2-4d91-96ce-fac3d698ec6b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.700781] env[69328]: DEBUG oslo_vmware.api [None req-e9757ca2-942a-4ea5-b6e2-d822199969ea tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Waiting for the task: (returnval){ [ 879.700781] env[69328]: value = "task-3273472" [ 879.700781] env[69328]: _type = "Task" [ 879.700781] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.711772] env[69328]: DEBUG oslo_vmware.api [None req-e9757ca2-942a-4ea5-b6e2-d822199969ea tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Task: {'id': task-3273472, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.787966] env[69328]: DEBUG oslo_concurrency.lockutils [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.473s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 879.788520] env[69328]: DEBUG nova.compute.manager [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 879.791175] env[69328]: DEBUG oslo_concurrency.lockutils [None req-19a2c12f-c0de-4685-897b-3e7ed6f2c4f1 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.785s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 879.791415] env[69328]: DEBUG nova.objects.instance [None req-19a2c12f-c0de-4685-897b-3e7ed6f2c4f1 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Lazy-loading 'resources' on Instance uuid 3daf7b73-5679-47ce-b847-f3786f1000d4 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 879.899388] env[69328]: DEBUG oslo_vmware.api [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a487d1-b92f-45ce-a8d3-70b5e6546710, 'name': SearchDatastore_Task, 'duration_secs': 0.007344} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.904658] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Reconfiguring VM instance instance-0000002f to detach disk 2000 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 879.904921] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8780bc4d-0675-41d2-a8a3-a97bb888233e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.923278] env[69328]: DEBUG oslo_vmware.api [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Waiting for the task: (returnval){ [ 879.923278] env[69328]: value = "task-3273473" [ 879.923278] env[69328]: _type = "Task" [ 879.923278] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.930757] env[69328]: DEBUG oslo_vmware.api [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273473, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.211452] env[69328]: DEBUG oslo_vmware.api [None req-e9757ca2-942a-4ea5-b6e2-d822199969ea tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Task: {'id': task-3273472, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151199} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.211746] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9757ca2-942a-4ea5-b6e2-d822199969ea tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 880.211907] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e9757ca2-942a-4ea5-b6e2-d822199969ea tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 880.212099] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e9757ca2-942a-4ea5-b6e2-d822199969ea tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 880.212274] env[69328]: INFO nova.compute.manager [None req-e9757ca2-942a-4ea5-b6e2-d822199969ea tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Took 1.11 seconds to destroy the instance on the hypervisor. [ 880.212581] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e9757ca2-942a-4ea5-b6e2-d822199969ea tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 880.212785] env[69328]: DEBUG nova.compute.manager [-] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 880.212879] env[69328]: DEBUG nova.network.neutron [-] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 880.296760] env[69328]: DEBUG nova.compute.utils [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 880.298885] env[69328]: DEBUG nova.compute.manager [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 880.299088] env[69328]: DEBUG nova.network.neutron [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 880.336415] env[69328]: DEBUG nova.policy [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5815ee93e968445fa7f316a6ae30c89d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3a7004dbbda84cc58b5ba7e1b8359df5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 880.434625] env[69328]: DEBUG oslo_vmware.api [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273473, 'name': ReconfigVM_Task, 'duration_secs': 0.191836} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.437744] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Reconfigured VM instance instance-0000002f to detach disk 2000 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 880.438727] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc60ebfe-2c61-4c4e-bc47-0e84d70f3513 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.471029] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] 25fb207b-9388-4198-bb48-ab7cebd43375/25fb207b-9388-4198-bb48-ab7cebd43375.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 880.476231] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-15249540-f503-4a1e-b63b-c0ffc1fd2a0e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.490070] env[69328]: DEBUG nova.network.neutron [-] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.497789] env[69328]: DEBUG oslo_vmware.api [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Waiting for the task: (returnval){ [ 880.497789] env[69328]: value = "task-3273474" [ 880.497789] env[69328]: _type = "Task" [ 880.497789] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.510785] env[69328]: DEBUG oslo_vmware.api [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273474, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.530079] env[69328]: DEBUG nova.compute.manager [req-20da16e5-d74e-4009-a032-5b4d86fcea8e req-53e2c180-7472-4215-830a-f1bd332ef7e3 service nova] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Received event network-vif-deleted-0ccfb49b-7358-4b6d-ae9b-bf36dac84b40 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 880.697082] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-678a8025-8399-489a-925f-b5ccf4e8e4ce {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.704890] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe4da7c7-df9f-4f5b-9d39-7e252ec1cc53 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.735302] env[69328]: DEBUG nova.network.neutron [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Successfully created port: 7c92f501-6c17-4266-8177-0b568c42e422 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 880.737998] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5491582e-5ef9-43ed-b332-dd6687eec152 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.746040] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8de345a-e0c1-46fb-9728-694e1a5a030a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.760583] env[69328]: DEBUG nova.compute.provider_tree [None req-19a2c12f-c0de-4685-897b-3e7ed6f2c4f1 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 880.801604] env[69328]: DEBUG nova.compute.manager [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 880.994354] env[69328]: INFO nova.compute.manager [-] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Took 0.78 seconds to deallocate network for instance. [ 881.008376] env[69328]: DEBUG oslo_vmware.api [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273474, 'name': ReconfigVM_Task, 'duration_secs': 0.286449} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.008630] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Reconfigured VM instance instance-0000002f to attach disk [datastore1] 25fb207b-9388-4198-bb48-ab7cebd43375/25fb207b-9388-4198-bb48-ab7cebd43375.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 881.009441] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4783e2a3-0059-4576-a2c1-2716f755a986 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.028080] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee08d9b1-d945-458a-ba4f-68cdb3282ff5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.046112] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94e1cddf-1234-4e1c-bf91-17f52e8a09f7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.064462] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb8744cf-a906-4c9a-8dcb-b13069469e94 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.070774] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 881.071008] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a7577f1a-6ae0-4957-a072-0d42f2ffa3cd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.077728] env[69328]: DEBUG oslo_vmware.api [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Waiting for the task: (returnval){ [ 881.077728] env[69328]: value = "task-3273475" [ 881.077728] env[69328]: _type = "Task" [ 881.077728] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.085720] env[69328]: DEBUG oslo_vmware.api [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273475, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.263658] env[69328]: DEBUG nova.scheduler.client.report [None req-19a2c12f-c0de-4685-897b-3e7ed6f2c4f1 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 881.500526] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e9757ca2-942a-4ea5-b6e2-d822199969ea tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 881.587962] env[69328]: DEBUG oslo_vmware.api [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273475, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.769885] env[69328]: DEBUG oslo_concurrency.lockutils [None req-19a2c12f-c0de-4685-897b-3e7ed6f2c4f1 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.979s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 881.772413] env[69328]: DEBUG oslo_concurrency.lockutils [None req-497f040b-0ba7-4797-927d-dee828c96775 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.574s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 881.772657] env[69328]: DEBUG nova.objects.instance [None req-497f040b-0ba7-4797-927d-dee828c96775 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Lazy-loading 'resources' on Instance uuid d045c9ca-71f9-411e-9048-71b36c32f4b2 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 881.808123] env[69328]: INFO nova.scheduler.client.report [None req-19a2c12f-c0de-4685-897b-3e7ed6f2c4f1 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Deleted allocations for instance 3daf7b73-5679-47ce-b847-f3786f1000d4 [ 881.811092] env[69328]: DEBUG nova.compute.manager [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 881.843867] env[69328]: DEBUG nova.virt.hardware [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 881.844038] env[69328]: DEBUG nova.virt.hardware [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 881.844161] env[69328]: DEBUG nova.virt.hardware [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 881.844358] env[69328]: DEBUG nova.virt.hardware [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 881.844463] env[69328]: DEBUG nova.virt.hardware [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 881.844643] env[69328]: DEBUG nova.virt.hardware [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 881.844829] env[69328]: DEBUG nova.virt.hardware [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 881.844985] env[69328]: DEBUG nova.virt.hardware [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 881.845165] env[69328]: DEBUG 
nova.virt.hardware [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 881.845327] env[69328]: DEBUG nova.virt.hardware [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 881.845527] env[69328]: DEBUG nova.virt.hardware [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 881.846421] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acae7877-de18-47c0-8580-777b5868e212 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.854533] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b922cc5-409a-451b-97e5-7c9396eb7343 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.087964] env[69328]: DEBUG oslo_vmware.api [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273475, 'name': PowerOnVM_Task, 'duration_secs': 0.766544} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.088251] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 882.107865] env[69328]: DEBUG nova.compute.manager [req-043c9df4-8f7c-44e4-9a20-6c5e17c1734d req-6ba04487-0324-4a30-9456-e2c80f25fed7 service nova] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Received event network-vif-plugged-7c92f501-6c17-4266-8177-0b568c42e422 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 882.108113] env[69328]: DEBUG oslo_concurrency.lockutils [req-043c9df4-8f7c-44e4-9a20-6c5e17c1734d req-6ba04487-0324-4a30-9456-e2c80f25fed7 service nova] Acquiring lock "5a45bd6a-b063-4104-a85a-d78a4bb9452e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 882.108330] env[69328]: DEBUG oslo_concurrency.lockutils [req-043c9df4-8f7c-44e4-9a20-6c5e17c1734d req-6ba04487-0324-4a30-9456-e2c80f25fed7 service nova] Lock "5a45bd6a-b063-4104-a85a-d78a4bb9452e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 882.108501] env[69328]: DEBUG oslo_concurrency.lockutils [req-043c9df4-8f7c-44e4-9a20-6c5e17c1734d req-6ba04487-0324-4a30-9456-e2c80f25fed7 service nova] Lock "5a45bd6a-b063-4104-a85a-d78a4bb9452e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 882.108670] env[69328]: DEBUG nova.compute.manager [req-043c9df4-8f7c-44e4-9a20-6c5e17c1734d req-6ba04487-0324-4a30-9456-e2c80f25fed7 service nova] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] No waiting events found dispatching network-vif-plugged-7c92f501-6c17-4266-8177-0b568c42e422 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 882.108941] env[69328]: WARNING nova.compute.manager [req-043c9df4-8f7c-44e4-9a20-6c5e17c1734d req-6ba04487-0324-4a30-9456-e2c80f25fed7 service nova] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Received unexpected event network-vif-plugged-7c92f501-6c17-4266-8177-0b568c42e422 for instance with vm_state building and task_state spawning. 
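The PowerOffVM_Task, ReconfigVM_Task and PowerOnVM_Task records above all follow the same oslo.vmware pattern: the driver invokes a vSphere *_Task method, gets back a task reference, and _poll_task reports progress until the task completes. A minimal sketch of that invoke-and-poll loop, assuming a hypothetical vCenter endpoint, credentials and VM moref value (none of which are taken from this log):

# Sketch only, not Nova's code: the invoke-and-poll pattern behind the
# "Invoking VirtualMachine.PowerOnVM_Task ... progress is N% ... completed successfully" records.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession('vcenter.example.test', 'user', 'secret',   # hypothetical endpoint/credentials
                               api_retry_count=10, task_poll_interval=0.5)

vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')   # hypothetical managed object reference

task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task)   # polls the task's TaskInfo, logging progress, until success or error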
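The Acquiring lock / acquired / released records (for "compute_resources", the per-instance UUID locks and the "-events" locks) come from oslo.concurrency. A short sketch of the two helper forms that emit them, with a hypothetical critical section and lock name:

# Sketch of the oslo.concurrency helpers whose debug output appears throughout this log.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim_resources():
    # decorator form: logs "acquired ... waited Ns" / "released ... held Ns" around this call
    pass

claim_resources()

# context-manager form, as used for the refresh_cache-<uuid> locks (hypothetical name below)
with lockutils.lock('refresh_cache-example-instance'):
    pass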
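The inventory payloads pushed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e differ only in the DISK_GB max_unit (115 vs 116), which is why the report client keeps rewriting the inventory and bumping the provider generation (97 -> 98 -> 99). Per resource class, Placement treats schedulable capacity as (total - reserved) * allocation_ratio; a quick worked example using the figures from the records above:

# Worked example: capacity implied by the inventory dicts logged above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0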
[ 882.195416] env[69328]: DEBUG nova.network.neutron [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Successfully updated port: 7c92f501-6c17-4266-8177-0b568c42e422 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 882.322085] env[69328]: DEBUG oslo_concurrency.lockutils [None req-19a2c12f-c0de-4685-897b-3e7ed6f2c4f1 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Lock "3daf7b73-5679-47ce-b847-f3786f1000d4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.914s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 882.600180] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0e47187-318d-45fa-970e-449cba3b9a0a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.608220] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f994409-e285-44d0-b08b-d55a192e0ddd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.641200] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a664b4b-540c-4ff2-b8aa-d4bfde6f93cd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.656300] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a100d699-1087-47e1-8557-f1ade83b6412 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.669907] env[69328]: DEBUG nova.compute.provider_tree [None req-497f040b-0ba7-4797-927d-dee828c96775 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 882.697743] env[69328]: DEBUG oslo_concurrency.lockutils [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Acquiring lock "refresh_cache-5a45bd6a-b063-4104-a85a-d78a4bb9452e" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.697944] env[69328]: DEBUG oslo_concurrency.lockutils [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Acquired lock "refresh_cache-5a45bd6a-b063-4104-a85a-d78a4bb9452e" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 882.698758] env[69328]: DEBUG nova.network.neutron [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 
tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 883.101837] env[69328]: INFO nova.compute.manager [None req-84333442-0ec5-48b0-baac-5f5306fee361 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Updating instance to original state: 'active' [ 883.209093] env[69328]: DEBUG nova.scheduler.client.report [None req-497f040b-0ba7-4797-927d-dee828c96775 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Updated inventory for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with generation 98 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 883.209384] env[69328]: DEBUG nova.compute.provider_tree [None req-497f040b-0ba7-4797-927d-dee828c96775 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Updating resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e generation from 98 to 99 during operation: update_inventory {{(pid=69328) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 883.209561] env[69328]: DEBUG nova.compute.provider_tree [None req-497f040b-0ba7-4797-927d-dee828c96775 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 883.365105] env[69328]: DEBUG nova.network.neutron [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 883.559147] env[69328]: DEBUG nova.network.neutron [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Updating instance_info_cache with network_info: [{"id": "7c92f501-6c17-4266-8177-0b568c42e422", "address": "fa:16:3e:1c:23:c6", "network": {"id": "eb2800dd-49f4-4d5c-ae71-277af2fe7a4b", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1464475148-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3a7004dbbda84cc58b5ba7e1b8359df5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52358fcc-0d9f-45dd-8c75-db533fd992c3", "external-id": "nsx-vlan-transportzone-77", "segmentation_id": 77, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c92f501-6c", "ovs_interfaceid": "7c92f501-6c17-4266-8177-0b568c42e422", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.716127] env[69328]: DEBUG oslo_concurrency.lockutils [None req-497f040b-0ba7-4797-927d-dee828c96775 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.944s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 883.723665] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.035s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 883.726158] env[69328]: INFO nova.compute.claims [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 883.750085] env[69328]: INFO nova.scheduler.client.report [None req-497f040b-0ba7-4797-927d-dee828c96775 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Deleted allocations for instance d045c9ca-71f9-411e-9048-71b36c32f4b2 [ 883.773801] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Acquiring lock "73d5b248-3c3e-4e38-8d9c-1f9bfdb38494" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 
883.774842] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Lock "73d5b248-3c3e-4e38-8d9c-1f9bfdb38494" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 884.062303] env[69328]: DEBUG oslo_concurrency.lockutils [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Releasing lock "refresh_cache-5a45bd6a-b063-4104-a85a-d78a4bb9452e" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 884.062723] env[69328]: DEBUG nova.compute.manager [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Instance network_info: |[{"id": "7c92f501-6c17-4266-8177-0b568c42e422", "address": "fa:16:3e:1c:23:c6", "network": {"id": "eb2800dd-49f4-4d5c-ae71-277af2fe7a4b", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1464475148-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3a7004dbbda84cc58b5ba7e1b8359df5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52358fcc-0d9f-45dd-8c75-db533fd992c3", "external-id": "nsx-vlan-transportzone-77", "segmentation_id": 77, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c92f501-6c", "ovs_interfaceid": "7c92f501-6c17-4266-8177-0b568c42e422", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 884.063187] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1c:23:c6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '52358fcc-0d9f-45dd-8c75-db533fd992c3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7c92f501-6c17-4266-8177-0b568c42e422', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 884.071223] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Creating folder: Project (3a7004dbbda84cc58b5ba7e1b8359df5). Parent ref: group-v653649. 
{{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 884.071912] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3c2ca45a-a6ee-491b-8e3e-375cbc1caf24 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.084985] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Created folder: Project (3a7004dbbda84cc58b5ba7e1b8359df5) in parent group-v653649. [ 884.085188] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Creating folder: Instances. Parent ref: group-v653842. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 884.085426] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fa7280e9-e9fc-4bf8-94b7-766db8513d99 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.094257] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Created folder: Instances in parent group-v653842. [ 884.094481] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 884.094674] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 884.094867] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b65602bf-1ff9-40f3-84e8-5e1c65e8c8c2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.112774] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 884.112774] env[69328]: value = "task-3273478" [ 884.112774] env[69328]: _type = "Task" [ 884.112774] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.120241] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273478, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.136160] env[69328]: DEBUG nova.compute.manager [req-633bc1d9-3d05-454e-9c50-6ed3faf306a4 req-66f40bee-784b-4532-bd96-cb5361dde24b service nova] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Received event network-changed-7c92f501-6c17-4266-8177-0b568c42e422 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 884.136354] env[69328]: DEBUG nova.compute.manager [req-633bc1d9-3d05-454e-9c50-6ed3faf306a4 req-66f40bee-784b-4532-bd96-cb5361dde24b service nova] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Refreshing instance network info cache due to event network-changed-7c92f501-6c17-4266-8177-0b568c42e422. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 884.136569] env[69328]: DEBUG oslo_concurrency.lockutils [req-633bc1d9-3d05-454e-9c50-6ed3faf306a4 req-66f40bee-784b-4532-bd96-cb5361dde24b service nova] Acquiring lock "refresh_cache-5a45bd6a-b063-4104-a85a-d78a4bb9452e" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.136714] env[69328]: DEBUG oslo_concurrency.lockutils [req-633bc1d9-3d05-454e-9c50-6ed3faf306a4 req-66f40bee-784b-4532-bd96-cb5361dde24b service nova] Acquired lock "refresh_cache-5a45bd6a-b063-4104-a85a-d78a4bb9452e" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 884.136872] env[69328]: DEBUG nova.network.neutron [req-633bc1d9-3d05-454e-9c50-6ed3faf306a4 req-66f40bee-784b-4532-bd96-cb5361dde24b service nova] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Refreshing network info cache for port 7c92f501-6c17-4266-8177-0b568c42e422 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 884.222510] env[69328]: DEBUG oslo_concurrency.lockutils [None req-45537925-9aff-448a-99db-99d3a6cd0b05 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquiring lock "25fb207b-9388-4198-bb48-ab7cebd43375" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 884.222924] env[69328]: DEBUG oslo_concurrency.lockutils [None req-45537925-9aff-448a-99db-99d3a6cd0b05 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Lock "25fb207b-9388-4198-bb48-ab7cebd43375" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 884.223238] env[69328]: DEBUG oslo_concurrency.lockutils [None req-45537925-9aff-448a-99db-99d3a6cd0b05 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquiring lock "25fb207b-9388-4198-bb48-ab7cebd43375-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 884.223449] env[69328]: DEBUG oslo_concurrency.lockutils [None req-45537925-9aff-448a-99db-99d3a6cd0b05 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Lock "25fb207b-9388-4198-bb48-ab7cebd43375-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 
0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 884.223627] env[69328]: DEBUG oslo_concurrency.lockutils [None req-45537925-9aff-448a-99db-99d3a6cd0b05 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Lock "25fb207b-9388-4198-bb48-ab7cebd43375-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 884.225777] env[69328]: INFO nova.compute.manager [None req-45537925-9aff-448a-99db-99d3a6cd0b05 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Terminating instance [ 884.259435] env[69328]: DEBUG oslo_concurrency.lockutils [None req-497f040b-0ba7-4797-927d-dee828c96775 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Lock "d045c9ca-71f9-411e-9048-71b36c32f4b2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.545s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 884.276772] env[69328]: DEBUG nova.compute.manager [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 884.623829] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273478, 'name': CreateVM_Task, 'duration_secs': 0.348394} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.624388] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 884.625216] env[69328]: DEBUG oslo_concurrency.lockutils [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.625530] env[69328]: DEBUG oslo_concurrency.lockutils [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 884.625954] env[69328]: DEBUG oslo_concurrency.lockutils [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 884.626329] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99f716ae-8f81-4497-989d-2615105b0c4d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.631720] env[69328]: DEBUG oslo_vmware.api [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Waiting for the task: (returnval){ [ 884.631720] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]528fa409-129d-300f-09f1-c05a63d67870" [ 884.631720] env[69328]: _type = "Task" [ 884.631720] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.643097] env[69328]: DEBUG oslo_vmware.api [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]528fa409-129d-300f-09f1-c05a63d67870, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.729263] env[69328]: DEBUG nova.compute.manager [None req-45537925-9aff-448a-99db-99d3a6cd0b05 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 884.729263] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-45537925-9aff-448a-99db-99d3a6cd0b05 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 884.730116] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-661df56c-fac3-4bd2-9a08-6bfd9ad47b7e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.739643] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-45537925-9aff-448a-99db-99d3a6cd0b05 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 884.739643] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8bfc2d96-df91-436e-81d7-a67a449414cd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.745828] env[69328]: DEBUG oslo_vmware.api [None req-45537925-9aff-448a-99db-99d3a6cd0b05 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Waiting for the task: (returnval){ [ 884.745828] env[69328]: value = "task-3273479" [ 884.745828] env[69328]: _type = "Task" [ 884.745828] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.753585] env[69328]: DEBUG oslo_vmware.api [None req-45537925-9aff-448a-99db-99d3a6cd0b05 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273479, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.796175] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 884.917530] env[69328]: DEBUG nova.network.neutron [req-633bc1d9-3d05-454e-9c50-6ed3faf306a4 req-66f40bee-784b-4532-bd96-cb5361dde24b service nova] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Updated VIF entry in instance network info cache for port 7c92f501-6c17-4266-8177-0b568c42e422. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 884.918165] env[69328]: DEBUG nova.network.neutron [req-633bc1d9-3d05-454e-9c50-6ed3faf306a4 req-66f40bee-784b-4532-bd96-cb5361dde24b service nova] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Updating instance_info_cache with network_info: [{"id": "7c92f501-6c17-4266-8177-0b568c42e422", "address": "fa:16:3e:1c:23:c6", "network": {"id": "eb2800dd-49f4-4d5c-ae71-277af2fe7a4b", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1464475148-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3a7004dbbda84cc58b5ba7e1b8359df5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52358fcc-0d9f-45dd-8c75-db533fd992c3", "external-id": "nsx-vlan-transportzone-77", "segmentation_id": 77, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c92f501-6c", "ovs_interfaceid": "7c92f501-6c17-4266-8177-0b568c42e422", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.101966] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78f3a79a-0b55-46c6-8ec7-e3fb79233b8c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.109479] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca19d594-6b60-4c35-b19f-584418ebe24c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.149493] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb50b675-5493-4ffb-bfdf-dd07cc8cf71f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.160217] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4004e1c5-5c9f-4f2b-b518-b505baa089c4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.164481] env[69328]: DEBUG oslo_vmware.api [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]528fa409-129d-300f-09f1-c05a63d67870, 'name': SearchDatastore_Task, 'duration_secs': 0.012687} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.164804] env[69328]: DEBUG oslo_concurrency.lockutils [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 885.165091] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 885.165354] env[69328]: DEBUG oslo_concurrency.lockutils [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.165503] env[69328]: DEBUG oslo_concurrency.lockutils [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 885.165776] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 885.166448] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae4eb5da-60ed-4031-a3a3-e663398d388a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.177238] env[69328]: DEBUG nova.compute.provider_tree [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 885.187754] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 885.187948] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 885.188677] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-beac8f9e-0319-4bd7-994d-6767612bab61 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.194300] env[69328]: DEBUG oslo_vmware.api [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Waiting for the task: (returnval){ [ 885.194300] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52723d67-1a70-3e19-c724-e7a1174e8250" [ 885.194300] env[69328]: _type = "Task" [ 885.194300] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.204134] env[69328]: DEBUG oslo_vmware.api [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52723d67-1a70-3e19-c724-e7a1174e8250, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.256073] env[69328]: DEBUG oslo_vmware.api [None req-45537925-9aff-448a-99db-99d3a6cd0b05 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273479, 'name': PowerOffVM_Task, 'duration_secs': 0.213959} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.256621] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-45537925-9aff-448a-99db-99d3a6cd0b05 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 885.256800] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-45537925-9aff-448a-99db-99d3a6cd0b05 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 885.257063] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5bde4934-0e1d-4d55-a9e4-336cf4c0b215 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.322252] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-45537925-9aff-448a-99db-99d3a6cd0b05 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 885.322470] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-45537925-9aff-448a-99db-99d3a6cd0b05 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 885.322693] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-45537925-9aff-448a-99db-99d3a6cd0b05 
tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Deleting the datastore file [datastore1] 25fb207b-9388-4198-bb48-ab7cebd43375 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 885.322959] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6c3589a7-e38f-4c8e-94da-e0ab5d1e6736 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.329050] env[69328]: DEBUG oslo_vmware.api [None req-45537925-9aff-448a-99db-99d3a6cd0b05 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Waiting for the task: (returnval){ [ 885.329050] env[69328]: value = "task-3273481" [ 885.329050] env[69328]: _type = "Task" [ 885.329050] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.336867] env[69328]: DEBUG oslo_vmware.api [None req-45537925-9aff-448a-99db-99d3a6cd0b05 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273481, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.420865] env[69328]: DEBUG oslo_concurrency.lockutils [req-633bc1d9-3d05-454e-9c50-6ed3faf306a4 req-66f40bee-784b-4532-bd96-cb5361dde24b service nova] Releasing lock "refresh_cache-5a45bd6a-b063-4104-a85a-d78a4bb9452e" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 885.681045] env[69328]: DEBUG nova.scheduler.client.report [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 885.705778] env[69328]: DEBUG oslo_vmware.api [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52723d67-1a70-3e19-c724-e7a1174e8250, 'name': SearchDatastore_Task, 'duration_secs': 0.01248} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.706586] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bdea4bd9-3051-459f-863f-0f2965a7c2a1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.711967] env[69328]: DEBUG oslo_vmware.api [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Waiting for the task: (returnval){ [ 885.711967] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]520c07c6-7141-4f3c-274c-1f7afe7fcad1" [ 885.711967] env[69328]: _type = "Task" [ 885.711967] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.721102] env[69328]: DEBUG oslo_vmware.api [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]520c07c6-7141-4f3c-274c-1f7afe7fcad1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.840214] env[69328]: DEBUG oslo_vmware.api [None req-45537925-9aff-448a-99db-99d3a6cd0b05 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273481, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.328603} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.840214] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-45537925-9aff-448a-99db-99d3a6cd0b05 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 885.840214] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-45537925-9aff-448a-99db-99d3a6cd0b05 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 885.840214] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-45537925-9aff-448a-99db-99d3a6cd0b05 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 885.840214] env[69328]: INFO nova.compute.manager [None req-45537925-9aff-448a-99db-99d3a6cd0b05 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Took 1.11 seconds to destroy the instance on the hypervisor. [ 885.840214] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-45537925-9aff-448a-99db-99d3a6cd0b05 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 885.840214] env[69328]: DEBUG nova.compute.manager [-] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 885.840214] env[69328]: DEBUG nova.network.neutron [-] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 886.186393] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.463s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 886.186994] env[69328]: DEBUG nova.compute.manager [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 886.193042] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1bbabe5-bbef-4ef3-b7b0-228e0c3f9062 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.208s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 886.193042] env[69328]: DEBUG nova.objects.instance [None req-f1bbabe5-bbef-4ef3-b7b0-228e0c3f9062 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lazy-loading 'resources' on Instance uuid 62fa6807-f67d-4bf5-ba23-9e97f9da120e {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 886.222863] env[69328]: DEBUG oslo_vmware.api [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]520c07c6-7141-4f3c-274c-1f7afe7fcad1, 'name': SearchDatastore_Task, 'duration_secs': 0.023001} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.224468] env[69328]: DEBUG oslo_concurrency.lockutils [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 886.225117] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 5a45bd6a-b063-4104-a85a-d78a4bb9452e/5a45bd6a-b063-4104-a85a-d78a4bb9452e.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 886.225858] env[69328]: DEBUG nova.compute.manager [req-8b1c7744-468d-4cbb-93fc-e1de7389cbce req-9419c30d-9c46-444b-bdfb-c88e862461fc service nova] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Received event network-vif-deleted-32db9785-1822-4acf-9971-06db92f35c18 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 886.226123] env[69328]: INFO nova.compute.manager [req-8b1c7744-468d-4cbb-93fc-e1de7389cbce req-9419c30d-9c46-444b-bdfb-c88e862461fc service nova] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Neutron deleted interface 32db9785-1822-4acf-9971-06db92f35c18; detaching it from the instance and deleting it from the info cache [ 886.226335] env[69328]: DEBUG nova.network.neutron [req-8b1c7744-468d-4cbb-93fc-e1de7389cbce req-9419c30d-9c46-444b-bdfb-c88e862461fc service nova] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.227655] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7924bd1b-cc54-42b9-9633-203c0197d39e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.236890] env[69328]: DEBUG oslo_vmware.api [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Waiting for the task: (returnval){ [ 886.236890] env[69328]: value = "task-3273482" [ 886.236890] env[69328]: _type = "Task" [ 886.236890] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.245635] env[69328]: DEBUG oslo_vmware.api [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Task: {'id': task-3273482, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.694568] env[69328]: DEBUG nova.compute.utils [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 886.699895] env[69328]: DEBUG nova.compute.manager [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 886.700031] env[69328]: DEBUG nova.network.neutron [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 886.706393] env[69328]: DEBUG nova.network.neutron [-] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.731130] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b1d93914-996e-4cab-a394-7a4fba6f3dd8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.744148] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be2c7295-34f0-4543-ae4f-859744f6d9ef {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.757254] env[69328]: DEBUG nova.policy [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '19265c910cd04814978013416bf2a18a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '636412f89c9d488a9cfd6f19ef046efc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 886.764603] env[69328]: DEBUG oslo_vmware.api [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Task: {'id': task-3273482, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.453487} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.764847] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 5a45bd6a-b063-4104-a85a-d78a4bb9452e/5a45bd6a-b063-4104-a85a-d78a4bb9452e.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 886.765068] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 886.765317] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-13c7c3e6-1771-4db4-9fbe-f54e5395944c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.772069] env[69328]: DEBUG oslo_vmware.api [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Waiting for the task: (returnval){ [ 886.772069] env[69328]: value = "task-3273483" [ 886.772069] env[69328]: _type = "Task" [ 886.772069] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.784658] env[69328]: DEBUG nova.compute.manager [req-8b1c7744-468d-4cbb-93fc-e1de7389cbce req-9419c30d-9c46-444b-bdfb-c88e862461fc service nova] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Detach interface failed, port_id=32db9785-1822-4acf-9971-06db92f35c18, reason: Instance 25fb207b-9388-4198-bb48-ab7cebd43375 could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 886.794677] env[69328]: DEBUG oslo_vmware.api [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Task: {'id': task-3273483, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.080767] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-951d4dae-cf6f-40f7-8ead-3a9530dc78a4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.091863] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc804d5b-9f7f-40d4-a653-84b49cfd6764 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.121313] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-442b12a2-5745-4b69-832c-02c0f9e8001c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.128086] env[69328]: DEBUG nova.network.neutron [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Successfully created port: efb2e2c6-d681-4301-b80d-b7a78c91677c {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 887.131014] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e6648fb-ab6a-4302-a101-3a8512bddcb1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.144226] env[69328]: DEBUG nova.compute.provider_tree [None req-f1bbabe5-bbef-4ef3-b7b0-228e0c3f9062 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 887.200751] env[69328]: DEBUG nova.compute.manager [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 887.209134] env[69328]: INFO nova.compute.manager [-] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Took 1.37 seconds to deallocate network for instance. [ 887.293571] env[69328]: DEBUG oslo_vmware.api [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Task: {'id': task-3273483, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067738} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.293850] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 887.294733] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b78143fe-e7bb-42dd-9029-fd96515383e7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.318327] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Reconfiguring VM instance instance-00000042 to attach disk [datastore1] 5a45bd6a-b063-4104-a85a-d78a4bb9452e/5a45bd6a-b063-4104-a85a-d78a4bb9452e.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 887.318956] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4b76ca97-1738-4114-86f2-334fa0546ca1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.340900] env[69328]: DEBUG oslo_vmware.api [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Waiting for the task: (returnval){ [ 887.340900] env[69328]: value = "task-3273484" [ 887.340900] env[69328]: _type = "Task" [ 887.340900] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.348411] env[69328]: DEBUG oslo_vmware.api [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Task: {'id': task-3273484, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.647880] env[69328]: DEBUG nova.scheduler.client.report [None req-f1bbabe5-bbef-4ef3-b7b0-228e0c3f9062 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 887.702434] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Acquiring lock "1f568ba1-8591-499b-b1ee-da16e26f81fc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 887.702721] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Lock "1f568ba1-8591-499b-b1ee-da16e26f81fc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 887.715585] env[69328]: DEBUG oslo_concurrency.lockutils [None req-45537925-9aff-448a-99db-99d3a6cd0b05 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 887.851771] env[69328]: DEBUG oslo_vmware.api [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Task: {'id': task-3273484, 'name': ReconfigVM_Task, 'duration_secs': 0.300125} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.852051] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Reconfigured VM instance instance-00000042 to attach disk [datastore1] 5a45bd6a-b063-4104-a85a-d78a4bb9452e/5a45bd6a-b063-4104-a85a-d78a4bb9452e.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 887.852664] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-620de53e-f796-41d7-b307-7504d85d6a94 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.858969] env[69328]: DEBUG oslo_vmware.api [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Waiting for the task: (returnval){ [ 887.858969] env[69328]: value = "task-3273485" [ 887.858969] env[69328]: _type = "Task" [ 887.858969] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.866350] env[69328]: DEBUG oslo_vmware.api [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Task: {'id': task-3273485, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.154024] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1bbabe5-bbef-4ef3-b7b0-228e0c3f9062 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.963s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 888.156695] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 37.715s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 888.189972] env[69328]: INFO nova.scheduler.client.report [None req-f1bbabe5-bbef-4ef3-b7b0-228e0c3f9062 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Deleted allocations for instance 62fa6807-f67d-4bf5-ba23-9e97f9da120e [ 888.205462] env[69328]: DEBUG nova.compute.manager [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 888.209457] env[69328]: DEBUG nova.compute.manager [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 888.241846] env[69328]: DEBUG nova.virt.hardware [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 888.241846] env[69328]: DEBUG nova.virt.hardware [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 888.242041] env[69328]: DEBUG nova.virt.hardware [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 888.242082] env[69328]: DEBUG nova.virt.hardware [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 888.242222] env[69328]: DEBUG nova.virt.hardware [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 888.242371] env[69328]: DEBUG nova.virt.hardware [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 888.242588] env[69328]: DEBUG nova.virt.hardware [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 888.242749] env[69328]: DEBUG nova.virt.hardware [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 888.243387] env[69328]: DEBUG nova.virt.hardware [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] 
Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 888.243387] env[69328]: DEBUG nova.virt.hardware [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 888.243387] env[69328]: DEBUG nova.virt.hardware [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 888.247040] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec9bd3ef-3076-4737-bde0-12c778488834 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.253519] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1243fa61-9a39-4c2e-ad7f-11b1664c8633 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.368906] env[69328]: DEBUG oslo_vmware.api [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Task: {'id': task-3273485, 'name': Rename_Task, 'duration_secs': 0.14719} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.369194] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 888.369455] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b4d56f73-4303-4746-bdcc-b88aaca3621d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.376097] env[69328]: DEBUG oslo_vmware.api [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Waiting for the task: (returnval){ [ 888.376097] env[69328]: value = "task-3273486" [ 888.376097] env[69328]: _type = "Task" [ 888.376097] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.383257] env[69328]: DEBUG oslo_vmware.api [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Task: {'id': task-3273486, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.597135] env[69328]: DEBUG nova.compute.manager [req-d7f25a55-ac52-4fd7-b6c3-03a6e2a302e5 req-987b7006-bc1d-431f-a5ef-b29ffba84431 service nova] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Received event network-vif-plugged-efb2e2c6-d681-4301-b80d-b7a78c91677c {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 888.597463] env[69328]: DEBUG oslo_concurrency.lockutils [req-d7f25a55-ac52-4fd7-b6c3-03a6e2a302e5 req-987b7006-bc1d-431f-a5ef-b29ffba84431 service nova] Acquiring lock "d017d08e-5f9e-4d05-8914-3320d4c87c9b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 888.597816] env[69328]: DEBUG oslo_concurrency.lockutils [req-d7f25a55-ac52-4fd7-b6c3-03a6e2a302e5 req-987b7006-bc1d-431f-a5ef-b29ffba84431 service nova] Lock "d017d08e-5f9e-4d05-8914-3320d4c87c9b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 888.598323] env[69328]: DEBUG oslo_concurrency.lockutils [req-d7f25a55-ac52-4fd7-b6c3-03a6e2a302e5 req-987b7006-bc1d-431f-a5ef-b29ffba84431 service nova] Lock "d017d08e-5f9e-4d05-8914-3320d4c87c9b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 888.598586] env[69328]: DEBUG nova.compute.manager [req-d7f25a55-ac52-4fd7-b6c3-03a6e2a302e5 req-987b7006-bc1d-431f-a5ef-b29ffba84431 service nova] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] No waiting events found dispatching network-vif-plugged-efb2e2c6-d681-4301-b80d-b7a78c91677c {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 888.598781] env[69328]: WARNING nova.compute.manager [req-d7f25a55-ac52-4fd7-b6c3-03a6e2a302e5 req-987b7006-bc1d-431f-a5ef-b29ffba84431 service nova] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Received unexpected event network-vif-plugged-efb2e2c6-d681-4301-b80d-b7a78c91677c for instance with vm_state building and task_state spawning. 
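Note: the repeated "Waiting for the task ... to complete", "progress is 0%." and "completed successfully." triples above (Rename_Task, PowerOnVM_Task, ...) come from oslo.vmware's task polling in wait_for_task/_poll_task: the driver submits a vCenter task and then polls its state until it reaches a terminal state. The snippet below is only a minimal illustrative sketch of that pattern, not the oslo.vmware implementation; get_task_info(), the poll interval and the timeout are assumptions for the example.

```python
import time

# Simplified sketch of the poll loop behind the "Waiting for the task ...
# to complete" / "completed successfully" log lines. get_task_info() is a
# stand-in for the vSphere TaskInfo lookup; it is not a real oslo.vmware call.

def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
    """Poll a vCenter task until it succeeds, fails, or times out."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()          # e.g. {'state': 'running', 'progress': 40}
        if info["state"] == "success":
            return info                 # logged as "completed successfully"
        if info["state"] == "error":
            raise RuntimeError(info.get("error", "task failed"))
        # intermediate states are logged as "Task: {...} progress is N%."
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete in time")
```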
[ 888.703723] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1bbabe5-bbef-4ef3-b7b0-228e0c3f9062 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "62fa6807-f67d-4bf5-ba23-9e97f9da120e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.886s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 888.719261] env[69328]: DEBUG nova.network.neutron [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Successfully updated port: efb2e2c6-d681-4301-b80d-b7a78c91677c {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 888.735772] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 888.888448] env[69328]: DEBUG oslo_vmware.api [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Task: {'id': task-3273486, 'name': PowerOnVM_Task, 'duration_secs': 0.489523} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.889401] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 888.889401] env[69328]: INFO nova.compute.manager [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Took 7.08 seconds to spawn the instance on the hypervisor. [ 888.889401] env[69328]: DEBUG nova.compute.manager [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 888.890085] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c188fc5-fff8-4a9d-9f2d-563335b17a40 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.173274] env[69328]: INFO nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Updating resource usage from migration e58a0bcf-9f68-4aec-b3e1-10cfbcb5ca51 [ 889.196424] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 889.196583] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance bc9c3a41-7264-4d69-bc15-397b5fa0a8ad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 889.196711] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance b0a1441c-81e2-4131-a2ff-f5042d559d9f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 889.196829] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 18022645-9a2a-489e-b0b1-486165f46f14 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 889.196953] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 889.197131] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 889.197176] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 4d320c76-45bb-451c-8fbb-3dd2d64f56d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 889.197314] env[69328]: WARNING nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 4a990411-16cd-4e53-9068-29654b69abe6 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 889.197447] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 36f6aab5-2774-402b-9db6-9912f2d5d473 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 889.197663] env[69328]: WARNING nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 25fb207b-9388-4198-bb48-ab7cebd43375 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 889.197865] env[69328]: WARNING nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance e5a2de79-cfbc-4d9c-8b58-5aa819657978 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 889.198077] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 55d9ba65-e5c8-446a-a209-a840f30ff02c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 889.198222] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance a0b663eb-31b0-4de1-94bc-660a7d9c1c7b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 889.198355] env[69328]: WARNING nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 7232ad5c-9f4e-425e-824a-4c3750f665eb is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 889.198542] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 5a45bd6a-b063-4104-a85a-d78a4bb9452e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 889.198667] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance d017d08e-5f9e-4d05-8914-3320d4c87c9b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 889.226041] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "refresh_cache-d017d08e-5f9e-4d05-8914-3320d4c87c9b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.226041] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquired lock "refresh_cache-d017d08e-5f9e-4d05-8914-3320d4c87c9b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 889.226184] env[69328]: DEBUG nova.network.neutron [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 889.405844] env[69328]: INFO nova.compute.manager [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Took 44.43 seconds to build instance. [ 889.701368] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance f1be93b2-08db-41fe-87c4-f4e5f964cfa4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 889.768044] env[69328]: DEBUG nova.network.neutron [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 889.908187] env[69328]: DEBUG oslo_concurrency.lockutils [None req-69bfe562-3f77-417c-9c0b-d9dc293d4468 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Lock "5a45bd6a-b063-4104-a85a-d78a4bb9452e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.947s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 889.915998] env[69328]: DEBUG nova.network.neutron [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Updating instance_info_cache with network_info: [{"id": "efb2e2c6-d681-4301-b80d-b7a78c91677c", "address": "fa:16:3e:08:d1:06", "network": {"id": "5b6ce910-5dc1-49bb-ad4b-fe1a86509fc3", "bridge": "br-int", "label": "tempest-ImagesTestJSON-224847435-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "636412f89c9d488a9cfd6f19ef046efc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1e1e320-ec56-4fcc-b6e9-30aa210d3b36", "external-id": "nsx-vlan-transportzone-447", "segmentation_id": 447, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefb2e2c6-d6", "ovs_interfaceid": "efb2e2c6-d681-4301-b80d-b7a78c91677c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.935856] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7b321411-bac2-4cbe-9722-f2b745357bf4 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Acquiring lock "5a45bd6a-b063-4104-a85a-d78a4bb9452e" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 889.936126] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7b321411-bac2-4cbe-9722-f2b745357bf4 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Lock "5a45bd6a-b063-4104-a85a-d78a4bb9452e" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 889.936357] env[69328]: INFO nova.compute.manager [None req-7b321411-bac2-4cbe-9722-f2b745357bf4 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Rebooting instance [ 890.204040] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance a0952fdf-5570-4112-bc4d-e9f9cee1599c has been scheduled to this compute host, the scheduler has made an allocation against this 
compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 890.204218] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Migration e58a0bcf-9f68-4aec-b3e1-10cfbcb5ca51 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 890.204218] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance fd72bae3-cb72-48d0-a0df-9ea3a770a86c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 890.419660] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Releasing lock "refresh_cache-d017d08e-5f9e-4d05-8914-3320d4c87c9b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 890.420043] env[69328]: DEBUG nova.compute.manager [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Instance network_info: |[{"id": "efb2e2c6-d681-4301-b80d-b7a78c91677c", "address": "fa:16:3e:08:d1:06", "network": {"id": "5b6ce910-5dc1-49bb-ad4b-fe1a86509fc3", "bridge": "br-int", "label": "tempest-ImagesTestJSON-224847435-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "636412f89c9d488a9cfd6f19ef046efc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1e1e320-ec56-4fcc-b6e9-30aa210d3b36", "external-id": "nsx-vlan-transportzone-447", "segmentation_id": 447, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefb2e2c6-d6", "ovs_interfaceid": "efb2e2c6-d681-4301-b80d-b7a78c91677c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 890.420472] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:08:d1:06', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd1e1e320-ec56-4fcc-b6e9-30aa210d3b36', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'efb2e2c6-d681-4301-b80d-b7a78c91677c', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 890.429014] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 890.429265] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 890.429525] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f7b7c3bf-0987-4fa3-bee6-1245efdad579 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.456893] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 890.456893] env[69328]: value = "task-3273487" [ 890.456893] env[69328]: _type = "Task" [ 890.456893] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.465538] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273487, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.469942] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7b321411-bac2-4cbe-9722-f2b745357bf4 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Acquiring lock "refresh_cache-5a45bd6a-b063-4104-a85a-d78a4bb9452e" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.470122] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7b321411-bac2-4cbe-9722-f2b745357bf4 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Acquired lock "refresh_cache-5a45bd6a-b063-4104-a85a-d78a4bb9452e" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 890.470296] env[69328]: DEBUG nova.network.neutron [None req-7b321411-bac2-4cbe-9722-f2b745357bf4 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 890.667540] env[69328]: DEBUG nova.compute.manager [req-cf6e41bd-34a9-4fb0-91ed-1f2f09efd706 req-6be08293-af11-4e48-859e-4e56380f26ff service nova] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Received event network-changed-efb2e2c6-d681-4301-b80d-b7a78c91677c {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 890.667776] env[69328]: DEBUG nova.compute.manager [req-cf6e41bd-34a9-4fb0-91ed-1f2f09efd706 req-6be08293-af11-4e48-859e-4e56380f26ff service nova] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Refreshing instance network info cache due to event network-changed-efb2e2c6-d681-4301-b80d-b7a78c91677c. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 890.668018] env[69328]: DEBUG oslo_concurrency.lockutils [req-cf6e41bd-34a9-4fb0-91ed-1f2f09efd706 req-6be08293-af11-4e48-859e-4e56380f26ff service nova] Acquiring lock "refresh_cache-d017d08e-5f9e-4d05-8914-3320d4c87c9b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.668159] env[69328]: DEBUG oslo_concurrency.lockutils [req-cf6e41bd-34a9-4fb0-91ed-1f2f09efd706 req-6be08293-af11-4e48-859e-4e56380f26ff service nova] Acquired lock "refresh_cache-d017d08e-5f9e-4d05-8914-3320d4c87c9b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 890.668315] env[69328]: DEBUG nova.network.neutron [req-cf6e41bd-34a9-4fb0-91ed-1f2f09efd706 req-6be08293-af11-4e48-859e-4e56380f26ff service nova] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Refreshing network info cache for port efb2e2c6-d681-4301-b80d-b7a78c91677c {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 890.707024] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 3ba646e8-a5c8-4917-a1c4-32b37affb598 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 890.967118] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273487, 'name': CreateVM_Task, 'duration_secs': 0.295904} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.967360] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 890.967919] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.968086] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 890.968404] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 890.968654] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9b863cd-7b9c-4fca-b1c6-43b7c6dca84f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.975492] env[69328]: 
DEBUG oslo_vmware.api [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 890.975492] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]526a7204-2cb8-3517-7dcf-d3c18bda771e" [ 890.975492] env[69328]: _type = "Task" [ 890.975492] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.984681] env[69328]: DEBUG oslo_vmware.api [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]526a7204-2cb8-3517-7dcf-d3c18bda771e, 'name': SearchDatastore_Task} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.984946] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 890.985190] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 890.985421] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.985569] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 890.985748] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 890.985991] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4fe7dc99-9d19-47e8-8720-8178820b67fe {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.993209] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 890.993389] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None 
req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 890.994107] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f519c53e-6973-42f5-a4bf-8111fe9a7364 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.998874] env[69328]: DEBUG oslo_vmware.api [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 890.998874] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52626d81-4ac7-b174-b6c1-18ac9055bea3" [ 890.998874] env[69328]: _type = "Task" [ 890.998874] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.008399] env[69328]: DEBUG oslo_vmware.api [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52626d81-4ac7-b174-b6c1-18ac9055bea3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.187465] env[69328]: DEBUG nova.network.neutron [None req-7b321411-bac2-4cbe-9722-f2b745357bf4 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Updating instance_info_cache with network_info: [{"id": "7c92f501-6c17-4266-8177-0b568c42e422", "address": "fa:16:3e:1c:23:c6", "network": {"id": "eb2800dd-49f4-4d5c-ae71-277af2fe7a4b", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1464475148-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3a7004dbbda84cc58b5ba7e1b8359df5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52358fcc-0d9f-45dd-8c75-db533fd992c3", "external-id": "nsx-vlan-transportzone-77", "segmentation_id": 77, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c92f501-6c", "ovs_interfaceid": "7c92f501-6c17-4266-8177-0b568c42e422", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.209926] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 20f750d7-1914-49bb-802f-464a30ffcf3a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 891.365706] env[69328]: DEBUG nova.network.neutron [req-cf6e41bd-34a9-4fb0-91ed-1f2f09efd706 req-6be08293-af11-4e48-859e-4e56380f26ff service nova] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Updated VIF entry in instance network info cache for port efb2e2c6-d681-4301-b80d-b7a78c91677c. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 891.366225] env[69328]: DEBUG nova.network.neutron [req-cf6e41bd-34a9-4fb0-91ed-1f2f09efd706 req-6be08293-af11-4e48-859e-4e56380f26ff service nova] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Updating instance_info_cache with network_info: [{"id": "efb2e2c6-d681-4301-b80d-b7a78c91677c", "address": "fa:16:3e:08:d1:06", "network": {"id": "5b6ce910-5dc1-49bb-ad4b-fe1a86509fc3", "bridge": "br-int", "label": "tempest-ImagesTestJSON-224847435-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "636412f89c9d488a9cfd6f19ef046efc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1e1e320-ec56-4fcc-b6e9-30aa210d3b36", "external-id": "nsx-vlan-transportzone-447", "segmentation_id": 447, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefb2e2c6-d6", "ovs_interfaceid": "efb2e2c6-d681-4301-b80d-b7a78c91677c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.514946] env[69328]: DEBUG oslo_vmware.api [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52626d81-4ac7-b174-b6c1-18ac9055bea3, 'name': SearchDatastore_Task, 'duration_secs': 0.008728} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.515966] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4359050a-120a-4f25-acd7-7d8eaea36fd0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.524082] env[69328]: DEBUG oslo_vmware.api [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 891.524082] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52bbf824-76c9-685c-9dec-1fc44983040a" [ 891.524082] env[69328]: _type = "Task" [ 891.524082] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.536332] env[69328]: DEBUG oslo_vmware.api [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52bbf824-76c9-685c-9dec-1fc44983040a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.690830] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7b321411-bac2-4cbe-9722-f2b745357bf4 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Releasing lock "refresh_cache-5a45bd6a-b063-4104-a85a-d78a4bb9452e" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 891.713240] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance c751ef77-c3be-46cd-b7eb-fe139bf0998b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 891.868923] env[69328]: DEBUG oslo_concurrency.lockutils [req-cf6e41bd-34a9-4fb0-91ed-1f2f09efd706 req-6be08293-af11-4e48-859e-4e56380f26ff service nova] Releasing lock "refresh_cache-d017d08e-5f9e-4d05-8914-3320d4c87c9b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 892.035197] env[69328]: DEBUG oslo_vmware.api [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52bbf824-76c9-685c-9dec-1fc44983040a, 'name': SearchDatastore_Task, 'duration_secs': 0.010238} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.035562] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 892.035911] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] d017d08e-5f9e-4d05-8914-3320d4c87c9b/d017d08e-5f9e-4d05-8914-3320d4c87c9b.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 892.036255] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-af763ee2-f3ff-40c7-ba79-ff0de3e34458 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.043243] env[69328]: DEBUG oslo_vmware.api [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 892.043243] env[69328]: value = "task-3273488" [ 892.043243] env[69328]: _type = "Task" [ 892.043243] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.053065] env[69328]: DEBUG oslo_vmware.api [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273488, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.195736] env[69328]: DEBUG nova.compute.manager [None req-7b321411-bac2-4cbe-9722-f2b745357bf4 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 892.196642] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bfce1be-a427-4271-ab3b-c83a010a80cc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.216248] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 1413dcfe-3570-4657-b811-81a1acc159d1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.553893] env[69328]: DEBUG oslo_vmware.api [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273488, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.474153} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.554156] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] d017d08e-5f9e-4d05-8914-3320d4c87c9b/d017d08e-5f9e-4d05-8914-3320d4c87c9b.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 892.554575] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 892.554699] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-01b565f1-bfb3-4d87-94e2-3afa64e54ff0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.561199] env[69328]: DEBUG oslo_vmware.api [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 892.561199] env[69328]: value = "task-3273489" [ 892.561199] env[69328]: _type = "Task" [ 892.561199] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.570054] env[69328]: DEBUG oslo_vmware.api [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273489, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.719573] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 893.071466] env[69328]: DEBUG oslo_vmware.api [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273489, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067473} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.071796] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 893.072553] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c71a26d7-9d0c-4037-98ad-37ea0b199800 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.094383] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] d017d08e-5f9e-4d05-8914-3320d4c87c9b/d017d08e-5f9e-4d05-8914-3320d4c87c9b.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 893.094638] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aeb60a04-cbac-463c-aee5-329ea37eb4d6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.117416] env[69328]: DEBUG oslo_vmware.api [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 893.117416] env[69328]: value = "task-3273490" [ 893.117416] env[69328]: _type = "Task" [ 893.117416] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.123208] env[69328]: DEBUG oslo_vmware.api [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273490, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.213997] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3271c742-d786-4185-bdef-b60f0827f1f9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.221146] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7b321411-bac2-4cbe-9722-f2b745357bf4 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Doing hard reboot of VM {{(pid=69328) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 893.221908] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 1f568ba1-8591-499b-b1ee-da16e26f81fc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 893.222237] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Total usable vcpus: 48, total allocated vcpus: 13 {{(pid=69328) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 893.222444] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3072MB phys_disk=200GB used_disk=13GB total_vcpus=48 used_vcpus=13 pci_stats=[] {{(pid=69328) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 893.224808] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-2af69613-985a-482e-84f7-76e1b20d2f68 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.230773] env[69328]: DEBUG oslo_vmware.api [None req-7b321411-bac2-4cbe-9722-f2b745357bf4 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Waiting for the task: (returnval){ [ 893.230773] env[69328]: value = "task-3273491" [ 893.230773] env[69328]: _type = "Task" [ 893.230773] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.238850] env[69328]: DEBUG oslo_vmware.api [None req-7b321411-bac2-4cbe-9722-f2b745357bf4 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Task: {'id': task-3273491, 'name': ResetVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.544348] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f4ffedd-cb7d-4724-a12a-81283350a9d1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.552805] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a25f1c70-4558-46c3-b533-6b604898d08e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.585121] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acbf6895-1bd0-4f1a-84ad-95710286739a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.592338] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-611f24f1-c0b6-45d0-9f68-2f327b01bb8a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.605407] env[69328]: DEBUG nova.compute.provider_tree [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 893.624420] env[69328]: DEBUG oslo_vmware.api [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273490, 'name': ReconfigVM_Task, 'duration_secs': 0.301369} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.624698] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Reconfigured VM instance instance-00000043 to attach disk [datastore1] d017d08e-5f9e-4d05-8914-3320d4c87c9b/d017d08e-5f9e-4d05-8914-3320d4c87c9b.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 893.625463] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-40bf5b53-43a2-44b5-b92f-a2dd10a3f7a9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.632018] env[69328]: DEBUG oslo_vmware.api [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 893.632018] env[69328]: value = "task-3273492" [ 893.632018] env[69328]: _type = "Task" [ 893.632018] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.639729] env[69328]: DEBUG oslo_vmware.api [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273492, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.742283] env[69328]: DEBUG oslo_vmware.api [None req-7b321411-bac2-4cbe-9722-f2b745357bf4 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Task: {'id': task-3273491, 'name': ResetVM_Task, 'duration_secs': 0.125706} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.742283] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7b321411-bac2-4cbe-9722-f2b745357bf4 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Did hard reboot of VM {{(pid=69328) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 893.742283] env[69328]: DEBUG nova.compute.manager [None req-7b321411-bac2-4cbe-9722-f2b745357bf4 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 893.742569] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d81e5b8b-bdca-4ced-897f-7f1b122c3c3f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.127342] env[69328]: ERROR nova.scheduler.client.report [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [req-7f984960-f3a3-45b1-be72-0688d605bf22] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-7f984960-f3a3-45b1-be72-0688d605bf22"}]} [ 894.140592] env[69328]: DEBUG oslo_vmware.api [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273492, 'name': Rename_Task, 'duration_secs': 0.143942} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.140906] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 894.141170] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-246bc065-cc43-4f99-973b-a0c824cd4513 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.145216] env[69328]: DEBUG nova.scheduler.client.report [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Refreshing inventories for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 894.150622] env[69328]: DEBUG oslo_vmware.api [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 894.150622] env[69328]: value = "task-3273493" [ 894.150622] env[69328]: _type = "Task" [ 894.150622] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.158446] env[69328]: DEBUG oslo_vmware.api [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273493, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.159877] env[69328]: DEBUG nova.scheduler.client.report [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Updating ProviderTree inventory for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 894.160065] env[69328]: DEBUG nova.compute.provider_tree [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 894.172498] env[69328]: DEBUG nova.scheduler.client.report [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Refreshing aggregate associations for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e, aggregates: None {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 894.194209] env[69328]: DEBUG nova.scheduler.client.report [None 
req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Refreshing trait associations for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 894.254677] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7b321411-bac2-4cbe-9722-f2b745357bf4 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Lock "5a45bd6a-b063-4104-a85a-d78a4bb9452e" "released" by "nova.compute.manager.ComputeManager.reboot_instance.<locals>.do_reboot_instance" :: held 4.318s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 894.470463] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0c6b0d1-8c79-4ca2-b093-b07b8f3761da {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.478043] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-856b4aea-e05e-495d-91ad-d8c399c3b436 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.508095] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aea2fc46-7d1b-4910-ac67-f6a559c2c710 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.515654] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eb95bf1-9215-47f7-8087-eed46abbac8d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.530708] env[69328]: DEBUG nova.compute.provider_tree [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 894.555731] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3b4b7cd4-301e-4a1a-86eb-8920abfdcbd3 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Acquiring lock "5a45bd6a-b063-4104-a85a-d78a4bb9452e" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 894.556078] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3b4b7cd4-301e-4a1a-86eb-8920abfdcbd3 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Lock "5a45bd6a-b063-4104-a85a-d78a4bb9452e" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 894.556365] env[69328]: DEBUG oslo_concurrency.lockutils
[None req-3b4b7cd4-301e-4a1a-86eb-8920abfdcbd3 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Acquiring lock "5a45bd6a-b063-4104-a85a-d78a4bb9452e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 894.556577] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3b4b7cd4-301e-4a1a-86eb-8920abfdcbd3 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Lock "5a45bd6a-b063-4104-a85a-d78a4bb9452e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 894.556755] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3b4b7cd4-301e-4a1a-86eb-8920abfdcbd3 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Lock "5a45bd6a-b063-4104-a85a-d78a4bb9452e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 894.559045] env[69328]: INFO nova.compute.manager [None req-3b4b7cd4-301e-4a1a-86eb-8920abfdcbd3 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Terminating instance [ 894.659888] env[69328]: DEBUG oslo_vmware.api [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273493, 'name': PowerOnVM_Task, 'duration_secs': 0.440882} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.660174] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 894.660381] env[69328]: INFO nova.compute.manager [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Took 6.45 seconds to spawn the instance on the hypervisor. [ 894.660562] env[69328]: DEBUG nova.compute.manager [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 894.661320] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dbf058f-1e0a-4912-968d-a8eb20536059 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.063472] env[69328]: DEBUG nova.compute.manager [None req-3b4b7cd4-301e-4a1a-86eb-8920abfdcbd3 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Start destroying the instance on the hypervisor.
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 895.064878] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3b4b7cd4-301e-4a1a-86eb-8920abfdcbd3 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 895.064878] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4d5e21d-46cc-4544-85f4-d28a39ea0426 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.072872] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b4b7cd4-301e-4a1a-86eb-8920abfdcbd3 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 895.073123] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-be54bb1d-5adc-409b-9a69-1700e2706b98 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.077546] env[69328]: DEBUG nova.scheduler.client.report [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Updated inventory for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with generation 101 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 895.077791] env[69328]: DEBUG nova.compute.provider_tree [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Updating resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e generation from 101 to 102 during operation: update_inventory {{(pid=69328) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 895.077943] env[69328]: DEBUG nova.compute.provider_tree [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 895.085065] env[69328]: DEBUG oslo_vmware.api [None req-3b4b7cd4-301e-4a1a-86eb-8920abfdcbd3 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Waiting for the task: (returnval){ [ 895.085065] env[69328]: value = "task-3273494" [ 895.085065] env[69328]: _type = "Task" [ 895.085065] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.094558] env[69328]: DEBUG oslo_vmware.api [None req-3b4b7cd4-301e-4a1a-86eb-8920abfdcbd3 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Task: {'id': task-3273494, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.178815] env[69328]: INFO nova.compute.manager [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Took 46.52 seconds to build instance. [ 895.584433] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69328) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 895.584623] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 7.428s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 895.585027] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 43.977s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 895.585205] env[69328]: DEBUG nova.objects.instance [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69328) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 895.587797] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 895.587947] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Cleaning up deleted instances {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11834}} [ 895.597972] env[69328]: DEBUG oslo_vmware.api [None req-3b4b7cd4-301e-4a1a-86eb-8920abfdcbd3 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Task: {'id': task-3273494, 'name': PowerOffVM_Task, 'duration_secs': 0.176049} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.598237] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b4b7cd4-301e-4a1a-86eb-8920abfdcbd3 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 895.598404] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3b4b7cd4-301e-4a1a-86eb-8920abfdcbd3 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 895.598642] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-15731300-bedb-4d49-beb5-a2883a6e8f3f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.646377] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b6baf6c-f3df-41bd-99df-b57f70e78f73 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.654391] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3c859da1-4818-41dc-b1f1-d459cf0999ed tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Suspending the VM {{(pid=69328) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 895.654656] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-d949ad44-6bfc-4bc4-94c9-a776883fe6ab {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.657651] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3b4b7cd4-301e-4a1a-86eb-8920abfdcbd3 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 895.657867] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3b4b7cd4-301e-4a1a-86eb-8920abfdcbd3 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 895.658056] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b4b7cd4-301e-4a1a-86eb-8920abfdcbd3 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Deleting the datastore file [datastore1] 5a45bd6a-b063-4104-a85a-d78a4bb9452e {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 895.658279] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7a625d60-20df-4691-b0c6-2214f6a9d847 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.661209] env[69328]: DEBUG oslo_vmware.api [None req-3c859da1-4818-41dc-b1f1-d459cf0999ed tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ 
[ 895.661209] env[69328]: value = "task-3273496" [ 895.661209] env[69328]: _type = "Task" [ 895.661209] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.666021] env[69328]: DEBUG oslo_vmware.api [None req-3b4b7cd4-301e-4a1a-86eb-8920abfdcbd3 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Waiting for the task: (returnval){ [ 895.666021] env[69328]: value = "task-3273497" [ 895.666021] env[69328]: _type = "Task" [ 895.666021] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.672159] env[69328]: DEBUG oslo_vmware.api [None req-3c859da1-4818-41dc-b1f1-d459cf0999ed tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273496, 'name': SuspendVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.677135] env[69328]: DEBUG oslo_vmware.api [None req-3b4b7cd4-301e-4a1a-86eb-8920abfdcbd3 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Task: {'id': task-3273497, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.680657] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f0a95637-a50f-4e03-b390-7eecdfaa1d7b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "d017d08e-5f9e-4d05-8914-3320d4c87c9b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 48.030s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 896.107089] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] There are 49 instances to clean {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11843}} [ 896.107431] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: d045c9ca-71f9-411e-9048-71b36c32f4b2] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 896.173226] env[69328]: DEBUG oslo_vmware.api [None req-3c859da1-4818-41dc-b1f1-d459cf0999ed tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273496, 'name': SuspendVM_Task} progress is 70%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.177921] env[69328]: DEBUG oslo_vmware.api [None req-3b4b7cd4-301e-4a1a-86eb-8920abfdcbd3 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Task: {'id': task-3273497, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.408993} completed successfully.
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.178262] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b4b7cd4-301e-4a1a-86eb-8920abfdcbd3 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 896.178403] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3b4b7cd4-301e-4a1a-86eb-8920abfdcbd3 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 896.178541] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3b4b7cd4-301e-4a1a-86eb-8920abfdcbd3 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 896.178709] env[69328]: INFO nova.compute.manager [None req-3b4b7cd4-301e-4a1a-86eb-8920abfdcbd3 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Took 1.12 seconds to destroy the instance on the hypervisor. [ 896.178941] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3b4b7cd4-301e-4a1a-86eb-8920abfdcbd3 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 896.179172] env[69328]: DEBUG nova.compute.manager [-] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 896.179265] env[69328]: DEBUG nova.network.neutron [-] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 896.542400] env[69328]: DEBUG nova.compute.manager [req-eee50fbd-564e-42bd-b74d-339f09b36dee req-0b01430c-5efb-43ae-b63b-9ebf92d2e031 service nova] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Received event network-vif-deleted-7c92f501-6c17-4266-8177-0b568c42e422 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 896.542546] env[69328]: INFO nova.compute.manager [req-eee50fbd-564e-42bd-b74d-339f09b36dee req-0b01430c-5efb-43ae-b63b-9ebf92d2e031 service nova] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Neutron deleted interface 7c92f501-6c17-4266-8177-0b568c42e422; detaching it from the instance and deleting it from the info cache [ 896.542760] env[69328]: DEBUG nova.network.neutron [req-eee50fbd-564e-42bd-b74d-339f09b36dee req-0b01430c-5efb-43ae-b63b-9ebf92d2e031 service nova] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.611204] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c5fab1f9-f53c-49b3-9526-20d2c2cc4551 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.026s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 896.612824] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 3daf7b73-5679-47ce-b847-f3786f1000d4] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 896.614265] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 44.939s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 896.615712] env[69328]: INFO nova.compute.claims [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 896.672018] env[69328]: DEBUG oslo_vmware.api [None req-3c859da1-4818-41dc-b1f1-d459cf0999ed tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273496, 'name': SuspendVM_Task, 'duration_secs': 0.570923} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.672280] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3c859da1-4818-41dc-b1f1-d459cf0999ed tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Suspended the VM {{(pid=69328) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 896.672481] env[69328]: DEBUG nova.compute.manager [None req-3c859da1-4818-41dc-b1f1-d459cf0999ed tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 896.673323] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5691783-9ed8-4665-ab34-ece0aabb26f0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.996144] env[69328]: DEBUG nova.network.neutron [-] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.045821] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d71c732c-c9c6-4281-b096-5aa7111538a1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.056203] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d046989-be73-4907-ab4e-de946d2ddc3a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.088902] env[69328]: DEBUG nova.compute.manager [req-eee50fbd-564e-42bd-b74d-339f09b36dee req-0b01430c-5efb-43ae-b63b-9ebf92d2e031 service nova] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Detach interface failed, port_id=7c92f501-6c17-4266-8177-0b568c42e422, reason: Instance 5a45bd6a-b063-4104-a85a-d78a4bb9452e could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 897.119136] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 62fa6807-f67d-4bf5-ba23-9e97f9da120e] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 897.498387] env[69328]: INFO nova.compute.manager [-] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Took 1.32 seconds to deallocate network for instance. 
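
Editor's note: the entries above trace placement's optimistic-concurrency handling of inventory updates. At [ 894.127342] the report client's PUT fails with 409 "placement.concurrent_update" because its cached resource-provider generation is stale; it then refreshes the provider ([ 894.145216]) and the retried update succeeds, bumping the generation from 101 to 102 ([ 895.077546] onward). The sketch below is a minimal, self-contained model of that generation-conflict retry pattern only; it is not the nova.scheduler.client.report code, and every class and function name in it (FakePlacement, ReportClient, Conflict) is invented for illustration.

# Illustrative sketch, not Nova/placement code: model the PUT-with-generation,
# 409-on-conflict, refresh-and-retry loop that the log above shows.

class Conflict(Exception):
    """Stand-in for an HTTP 409 'resource provider generation conflict'."""


class FakePlacement:
    """Toy server: accepts an inventory update only if the caller's
    generation matches the current one, then bumps the generation."""

    def __init__(self):
        self.generation = 101
        self.inventory = {}

    def put_inventory(self, generation, inventory):
        if generation != self.generation:
            raise Conflict("resource provider generation conflict")
        self.inventory = inventory
        self.generation += 1          # e.g. 101 -> 102, as in the log
        return self.generation


class ReportClient:
    """Toy client: caches the provider generation and refreshes it on 409."""

    def __init__(self, placement):
        self.placement = placement
        self.cached_generation = 100   # deliberately stale to force a conflict

    def refresh(self):
        # In the real flow this is a GET of the provider and its inventories;
        # here we just resync the cached generation.
        self.cached_generation = self.placement.generation

    def set_inventory(self, inventory, max_attempts=2):
        for _ in range(max_attempts):
            try:
                new_gen = self.placement.put_inventory(
                    self.cached_generation, inventory)
                self.cached_generation = new_gen
                return new_gen
            except Conflict:
                # Mirrors "Refreshing inventories for resource provider ..."
                self.refresh()
        raise Conflict("gave up after %d attempts" % max_attempts)


if __name__ == "__main__":
    placement = FakePlacement()
    client = ReportClient(placement)
    inventory = {"VCPU": {"total": 48}, "MEMORY_MB": {"total": 196590},
                 "DISK_GB": {"total": 400}}
    print("updated, provider generation is now",
          client.set_inventory(inventory))

The same pattern explains why the ProviderTree inventory in the log is written twice with identical data: the first write lost the race, the refreshed retry won it.
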
[ 897.623908] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: b6951d01-31ac-4a8e-b70a-bbf6bd25fb7b] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 897.927738] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-683cf53d-51f7-41e3-b047-637e8ee29a93 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.936755] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91b24aca-cb4a-4ce2-8c73-ce1e02ff386c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.966556] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4877b234-4712-41ca-948d-4a3f35982561 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.973790] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5bcf43a-b62d-45e0-8e10-cfcc927045f9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.986785] env[69328]: DEBUG nova.compute.provider_tree [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 898.005304] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3b4b7cd4-301e-4a1a-86eb-8920abfdcbd3 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.129207] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: ef7effe4-b37f-4fab-ad24-9d8f72a47ee2] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 898.489752] env[69328]: DEBUG nova.scheduler.client.report [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 898.632971] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 07b1f872-02bc-471f-97d6-3a781075bee5] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 898.857634] env[69328]: DEBUG nova.compute.manager [None 
req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 898.858580] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5be26c4-74ca-4a6e-8469-a69f4e9b793f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.995075] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.381s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 898.995623] env[69328]: DEBUG nova.compute.manager [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 898.998648] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e810f54a-ff1a-4673-a691-033e65f1d8b1 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 42.178s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 898.998849] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e810f54a-ff1a-4673-a691-033e65f1d8b1 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 899.000832] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.724s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 899.002521] env[69328]: INFO nova.compute.claims [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 899.029246] env[69328]: INFO nova.scheduler.client.report [None req-e810f54a-ff1a-4673-a691-033e65f1d8b1 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Deleted allocations for instance 4a990411-16cd-4e53-9068-29654b69abe6 [ 899.137636] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 55f44102-2891-4b6c-b31e-e8255a24d180] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 899.369942] env[69328]: INFO 
nova.compute.manager [None req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] instance snapshotting [ 899.370276] env[69328]: WARNING nova.compute.manager [None req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 899.373040] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-359e92d9-5c70-4de4-af5f-7628277e0296 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.393794] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a190525c-5297-4148-bbb1-39d08da1d6af {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.507106] env[69328]: DEBUG nova.compute.utils [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 899.511204] env[69328]: DEBUG nova.compute.manager [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 899.511416] env[69328]: DEBUG nova.network.neutron [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 899.537208] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e810f54a-ff1a-4673-a691-033e65f1d8b1 tempest-ServersListShow296Test-1109314957 tempest-ServersListShow296Test-1109314957-project-member] Lock "4a990411-16cd-4e53-9068-29654b69abe6" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 47.011s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 899.562235] env[69328]: DEBUG nova.policy [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c1d18e6b9e284403a091afd2c3e31c1c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f357b5a9494b4849a83aa934c5d4e26b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 899.641525] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: d10bee67-6294-4537-9ce7-4eedb8361ddc] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes
/opt/stack/nova/nova/compute/manager.py:11847}} [ 899.885135] env[69328]: DEBUG nova.network.neutron [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Successfully created port: 1018560a-13d7-4d01-8fc4-03d0b9beab90 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 899.904520] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Creating Snapshot of the VM instance {{(pid=69328) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 899.904882] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-721eaa29-6043-4e32-984a-e82866673ec9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.913027] env[69328]: DEBUG oslo_vmware.api [None req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 899.913027] env[69328]: value = "task-3273498" [ 899.913027] env[69328]: _type = "Task" [ 899.913027] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.921088] env[69328]: DEBUG oslo_vmware.api [None req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273498, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.017786] env[69328]: DEBUG nova.compute.manager [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Start building block device mappings for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 900.145056] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: f02c5ee6-6cde-43d1-9db6-8d5b60cc98fe] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 900.372785] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e8dfc97-d9c5-44b2-a9d5-10b1b15423b3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.380617] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7f8e665-eb21-44df-b61f-6cee6386c44a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.411785] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-690f859a-c4fe-49db-bf18-c2274a3d2b13 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.427838] env[69328]: DEBUG oslo_vmware.api [None req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273498, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.429238] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85a6005d-6317-42ab-86bf-deaf3ff8303c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.446245] env[69328]: DEBUG nova.compute.provider_tree [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 900.648598] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: d724a141-35e7-4483-99aa-8a17066fb63b] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 900.924211] env[69328]: DEBUG oslo_vmware.api [None req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273498, 'name': CreateSnapshot_Task, 'duration_secs': 0.590353} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.924481] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Created Snapshot of the VM instance {{(pid=69328) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 900.925240] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-361b1817-8cba-494a-84e9-73fb03ab0202 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.952966] env[69328]: DEBUG nova.scheduler.client.report [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 901.026662] env[69328]: DEBUG nova.compute.manager [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 901.056853] env[69328]: DEBUG nova.virt.hardware [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=<?>,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-04-03T17:33:40Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 901.056853] env[69328]: DEBUG nova.virt.hardware [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 901.056853] env[69328]: DEBUG nova.virt.hardware [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 901.056853] env[69328]: DEBUG nova.virt.hardware [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 901.057045] env[69328]: DEBUG nova.virt.hardware [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 901.057045] env[69328]: DEBUG nova.virt.hardware [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 901.059480] env[69328]: DEBUG nova.virt.hardware [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 901.059480] env[69328]: DEBUG nova.virt.hardware [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 901.059480] env[69328]: DEBUG nova.virt.hardware [None
req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 901.059480] env[69328]: DEBUG nova.virt.hardware [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 901.059480] env[69328]: DEBUG nova.virt.hardware [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 901.059480] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2ed3764-83a4-42d5-b9da-42a6d4760438 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.066922] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6147c2fc-1d1f-4c7e-a9f7-2b2790611275 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.152173] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 99e31dfd-5d41-4564-886f-becc25ca289c] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 901.398315] env[69328]: DEBUG nova.compute.manager [req-c6c9fb79-426c-4de5-bb42-88e1e024886e req-b0da8e73-4bde-45d8-abb6-84192738a75d service nova] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Received event network-vif-plugged-1018560a-13d7-4d01-8fc4-03d0b9beab90 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 901.398558] env[69328]: DEBUG oslo_concurrency.lockutils [req-c6c9fb79-426c-4de5-bb42-88e1e024886e req-b0da8e73-4bde-45d8-abb6-84192738a75d service nova] Acquiring lock "f1be93b2-08db-41fe-87c4-f4e5f964cfa4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 901.399897] env[69328]: DEBUG oslo_concurrency.lockutils [req-c6c9fb79-426c-4de5-bb42-88e1e024886e req-b0da8e73-4bde-45d8-abb6-84192738a75d service nova] Lock "f1be93b2-08db-41fe-87c4-f4e5f964cfa4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 901.399983] env[69328]: DEBUG oslo_concurrency.lockutils [req-c6c9fb79-426c-4de5-bb42-88e1e024886e req-b0da8e73-4bde-45d8-abb6-84192738a75d service nova] Lock "f1be93b2-08db-41fe-87c4-f4e5f964cfa4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 901.400173] env[69328]: DEBUG nova.compute.manager [req-c6c9fb79-426c-4de5-bb42-88e1e024886e req-b0da8e73-4bde-45d8-abb6-84192738a75d service nova] [instance: 
f1be93b2-08db-41fe-87c4-f4e5f964cfa4] No waiting events found dispatching network-vif-plugged-1018560a-13d7-4d01-8fc4-03d0b9beab90 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 901.400759] env[69328]: WARNING nova.compute.manager [req-c6c9fb79-426c-4de5-bb42-88e1e024886e req-b0da8e73-4bde-45d8-abb6-84192738a75d service nova] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Received unexpected event network-vif-plugged-1018560a-13d7-4d01-8fc4-03d0b9beab90 for instance with vm_state building and task_state spawning. [ 901.443883] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Creating linked-clone VM from snapshot {{(pid=69328) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 901.444211] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-b8fb932d-3fd1-4329-975b-d9a977c4662c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.456020] env[69328]: DEBUG oslo_vmware.api [None req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 901.456020] env[69328]: value = "task-3273499" [ 901.456020] env[69328]: _type = "Task" [ 901.456020] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.461408] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.460s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 901.461954] env[69328]: DEBUG nova.compute.manager [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 901.467280] env[69328]: DEBUG oslo_vmware.api [None req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273499, 'name': CloneVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.467280] env[69328]: DEBUG oslo_concurrency.lockutils [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 37.490s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 901.467280] env[69328]: DEBUG nova.objects.instance [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69328) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 901.620835] env[69328]: DEBUG nova.network.neutron [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Successfully updated port: 1018560a-13d7-4d01-8fc4-03d0b9beab90 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 901.657027] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 690096cf-a0bd-4db1-ad97-8d8a37ad7c84] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 901.965993] env[69328]: DEBUG oslo_vmware.api [None req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273499, 'name': CloneVM_Task} progress is 94%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.969909] env[69328]: DEBUG nova.compute.utils [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 901.969909] env[69328]: DEBUG nova.compute.manager [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 901.969909] env[69328]: DEBUG nova.network.neutron [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 902.037927] env[69328]: DEBUG nova.policy [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f4dc7c56b3cb4e5b943b54ebe16a23df', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1393040bf5304571ae4b66d0a4ee7b6e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 902.123913] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "refresh_cache-f1be93b2-08db-41fe-87c4-f4e5f964cfa4" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.124089] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquired lock "refresh_cache-f1be93b2-08db-41fe-87c4-f4e5f964cfa4" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 902.124242] env[69328]: DEBUG nova.network.neutron [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 902.161290] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: afa25f89-ccda-4b77-aaa1-a3b62b53d870] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 902.433907] env[69328]: DEBUG nova.network.neutron [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Successfully created port: c74af0b7-ebfb-4563-9208-a18235899a6c {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 902.467626] env[69328]: DEBUG oslo_vmware.api [None req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273499, 'name': CloneVM_Task} progress is 100%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.480406] env[69328]: DEBUG nova.compute.manager [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 902.484490] env[69328]: DEBUG oslo_concurrency.lockutils [None req-69bc84b8-bcf5-476d-81ef-08d505cb45fd tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.018s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 902.489023] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 35.488s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 902.666189] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 5292b759-9d1f-486a-b4d6-90519b3ae986] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 902.681255] env[69328]: DEBUG nova.network.neutron [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 902.854147] env[69328]: DEBUG nova.network.neutron [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Updating instance_info_cache with network_info: [{"id": "1018560a-13d7-4d01-8fc4-03d0b9beab90", "address": "fa:16:3e:33:ba:27", "network": {"id": "4031def8-0553-461f-9528-aa338b9d7b2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1834835647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f357b5a9494b4849a83aa934c5d4e26b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "357d2811-e990-4985-9f9e-b158d10d3699", "external-id": "nsx-vlan-transportzone-641", "segmentation_id": 641, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1018560a-13", "ovs_interfaceid": "1018560a-13d7-4d01-8fc4-03d0b9beab90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.964709] env[69328]: DEBUG oslo_vmware.api [None req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273499, 'name': CloneVM_Task, 'duration_secs': 1.049734} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.964956] env[69328]: INFO nova.virt.vmwareapi.vmops [None req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Created linked-clone VM from snapshot [ 902.965689] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcbeed29-5321-4cf5-a19d-378e6751dc8a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.974261] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Uploading image 73e0a420-6bf5-4ed4-835c-91d5b10628ee {{(pid=69328) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 902.995910] env[69328]: INFO nova.compute.claims [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 903.006548] env[69328]: DEBUG oslo_vmware.rw_handles [None req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 903.006548] env[69328]: value = "vm-653847" [ 903.006548] env[69328]: _type = "VirtualMachine" [ 903.006548] env[69328]: }. {{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 903.007087] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-49199f70-1c89-44be-be95-2f8f191a4198 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.016193] env[69328]: DEBUG oslo_vmware.rw_handles [None req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lease: (returnval){ [ 903.016193] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b20373-26db-405b-c35a-44e9805c0433" [ 903.016193] env[69328]: _type = "HttpNfcLease" [ 903.016193] env[69328]: } obtained for exporting VM: (result){ [ 903.016193] env[69328]: value = "vm-653847" [ 903.016193] env[69328]: _type = "VirtualMachine" [ 903.016193] env[69328]: }. {{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 903.016193] env[69328]: DEBUG oslo_vmware.api [None req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the lease: (returnval){ [ 903.016193] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b20373-26db-405b-c35a-44e9805c0433" [ 903.016193] env[69328]: _type = "HttpNfcLease" [ 903.016193] env[69328]: } to be ready. 
{{(pid=69328) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 903.025415] env[69328]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 903.025415] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b20373-26db-405b-c35a-44e9805c0433" [ 903.025415] env[69328]: _type = "HttpNfcLease" [ 903.025415] env[69328]: } is initializing. {{(pid=69328) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 903.169424] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: c465c53f-d96b-461b-b8ff-b19929b4f789] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 903.366020] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Releasing lock "refresh_cache-f1be93b2-08db-41fe-87c4-f4e5f964cfa4" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 903.366020] env[69328]: DEBUG nova.compute.manager [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Instance network_info: |[{"id": "1018560a-13d7-4d01-8fc4-03d0b9beab90", "address": "fa:16:3e:33:ba:27", "network": {"id": "4031def8-0553-461f-9528-aa338b9d7b2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1834835647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f357b5a9494b4849a83aa934c5d4e26b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "357d2811-e990-4985-9f9e-b158d10d3699", "external-id": "nsx-vlan-transportzone-641", "segmentation_id": 641, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1018560a-13", "ovs_interfaceid": "1018560a-13d7-4d01-8fc4-03d0b9beab90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 903.366020] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:33:ba:27', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '357d2811-e990-4985-9f9e-b158d10d3699', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1018560a-13d7-4d01-8fc4-03d0b9beab90', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 903.372444] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] 
Creating folder: Project (f357b5a9494b4849a83aa934c5d4e26b). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 903.372759] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0ea26bf6-8655-44d2-8c63-83942be3506a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.384580] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Created folder: Project (f357b5a9494b4849a83aa934c5d4e26b) in parent group-v653649. [ 903.384771] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Creating folder: Instances. Parent ref: group-v653848. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 903.385012] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-53e2f9cb-737e-4f53-b523-fdf0f8fc39e8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.395137] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Created folder: Instances in parent group-v653848. [ 903.395385] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 903.395572] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 903.395773] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6a77eac6-f9b0-4bdf-8e6c-8513fd4fcb1d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.418685] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 903.418685] env[69328]: value = "task-3273503" [ 903.418685] env[69328]: _type = "Task" [ 903.418685] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.429034] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273503, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.444264] env[69328]: DEBUG nova.compute.manager [req-075d41e8-3673-4be4-963a-a71d2156df46 req-92d46de8-e09e-42b1-b63b-256948ed5e28 service nova] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Received event network-changed-1018560a-13d7-4d01-8fc4-03d0b9beab90 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 903.444264] env[69328]: DEBUG nova.compute.manager [req-075d41e8-3673-4be4-963a-a71d2156df46 req-92d46de8-e09e-42b1-b63b-256948ed5e28 service nova] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Refreshing instance network info cache due to event network-changed-1018560a-13d7-4d01-8fc4-03d0b9beab90. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 903.444264] env[69328]: DEBUG oslo_concurrency.lockutils [req-075d41e8-3673-4be4-963a-a71d2156df46 req-92d46de8-e09e-42b1-b63b-256948ed5e28 service nova] Acquiring lock "refresh_cache-f1be93b2-08db-41fe-87c4-f4e5f964cfa4" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 903.444264] env[69328]: DEBUG oslo_concurrency.lockutils [req-075d41e8-3673-4be4-963a-a71d2156df46 req-92d46de8-e09e-42b1-b63b-256948ed5e28 service nova] Acquired lock "refresh_cache-f1be93b2-08db-41fe-87c4-f4e5f964cfa4" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 903.444264] env[69328]: DEBUG nova.network.neutron [req-075d41e8-3673-4be4-963a-a71d2156df46 req-92d46de8-e09e-42b1-b63b-256948ed5e28 service nova] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Refreshing network info cache for port 1018560a-13d7-4d01-8fc4-03d0b9beab90 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 903.500658] env[69328]: DEBUG nova.compute.manager [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 903.504191] env[69328]: INFO nova.compute.resource_tracker [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Updating resource usage from migration e58a0bcf-9f68-4aec-b3e1-10cfbcb5ca51 [ 903.523368] env[69328]: DEBUG nova.virt.hardware [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 903.523628] env[69328]: DEBUG nova.virt.hardware [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 903.523702] env[69328]: DEBUG nova.virt.hardware [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 903.523892] env[69328]: DEBUG nova.virt.hardware [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 903.524065] env[69328]: DEBUG nova.virt.hardware [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 903.524220] env[69328]: DEBUG nova.virt.hardware [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 903.524429] env[69328]: DEBUG nova.virt.hardware [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 903.524596] env[69328]: 
DEBUG nova.virt.hardware [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 903.524763] env[69328]: DEBUG nova.virt.hardware [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 903.524936] env[69328]: DEBUG nova.virt.hardware [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 903.525118] env[69328]: DEBUG nova.virt.hardware [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 903.526207] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69323ae4-b39e-46d6-9645-32c41dd1afc0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.532667] env[69328]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 903.532667] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b20373-26db-405b-c35a-44e9805c0433" [ 903.532667] env[69328]: _type = "HttpNfcLease" [ 903.532667] env[69328]: } is ready. {{(pid=69328) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 903.533414] env[69328]: DEBUG oslo_vmware.rw_handles [None req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 903.533414] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b20373-26db-405b-c35a-44e9805c0433" [ 903.533414] env[69328]: _type = "HttpNfcLease" [ 903.533414] env[69328]: }. {{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 903.534163] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df8e15a1-4238-47ac-90b9-1b12f7b1dcec {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.540264] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f86a1d16-9f2a-44a0-a566-22edfb2fcf6a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.548940] env[69328]: DEBUG oslo_vmware.rw_handles [None req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52127d3e-94d4-f421-a530-4eb679c4bb49/disk-0.vmdk from lease info. 
{{(pid=69328) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 903.548940] env[69328]: DEBUG oslo_vmware.rw_handles [None req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52127d3e-94d4-f421-a530-4eb679c4bb49/disk-0.vmdk for reading. {{(pid=69328) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 903.653022] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d96f36af-cfe5-49b0-ada8-8fc737ace0e1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.675248] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 3923403b-2e8f-4033-89ee-9a907aff1d49] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 903.928995] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273503, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.986831] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab7f5c5e-37b9-47fe-b8f1-6588ee586f63 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.997464] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a22c825-356a-492c-a86f-f540240895f6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.029765] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e14cd3ea-c4d1-477b-a211-93388bbdeeab {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.037982] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be8addca-d939-4549-8c96-ea34face17c9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.056439] env[69328]: DEBUG nova.compute.provider_tree [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 904.178864] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 732342ea-2f73-40ea-a826-883ddc7a385a] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 904.203961] env[69328]: DEBUG nova.network.neutron [req-075d41e8-3673-4be4-963a-a71d2156df46 req-92d46de8-e09e-42b1-b63b-256948ed5e28 service nova] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Updated VIF entry in instance network info cache for port 1018560a-13d7-4d01-8fc4-03d0b9beab90. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 904.204351] env[69328]: DEBUG nova.network.neutron [req-075d41e8-3673-4be4-963a-a71d2156df46 req-92d46de8-e09e-42b1-b63b-256948ed5e28 service nova] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Updating instance_info_cache with network_info: [{"id": "1018560a-13d7-4d01-8fc4-03d0b9beab90", "address": "fa:16:3e:33:ba:27", "network": {"id": "4031def8-0553-461f-9528-aa338b9d7b2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1834835647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f357b5a9494b4849a83aa934c5d4e26b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "357d2811-e990-4985-9f9e-b158d10d3699", "external-id": "nsx-vlan-transportzone-641", "segmentation_id": 641, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1018560a-13", "ovs_interfaceid": "1018560a-13d7-4d01-8fc4-03d0b9beab90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.206329] env[69328]: DEBUG nova.network.neutron [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Successfully updated port: c74af0b7-ebfb-4563-9208-a18235899a6c {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 904.271447] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquiring lock "76210566-12d7-4f6a-afa1-6329e87e0f85" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 904.271447] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lock "76210566-12d7-4f6a-afa1-6329e87e0f85" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 904.429859] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273503, 'name': CreateVM_Task, 'duration_secs': 0.566095} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.430120] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 904.430913] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.431151] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 904.431621] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 904.432385] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d83ee639-bedf-4b38-be64-eb1ae2a68a5a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.437779] env[69328]: DEBUG oslo_vmware.api [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 904.437779] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]523b6b94-3ee6-be3a-4464-0a2214b136f9" [ 904.437779] env[69328]: _type = "Task" [ 904.437779] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.448189] env[69328]: DEBUG oslo_vmware.api [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]523b6b94-3ee6-be3a-4464-0a2214b136f9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.562190] env[69328]: DEBUG nova.scheduler.client.report [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 904.683850] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: e5d3df12-5334-44c8-9a44-1674e57918bb] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 904.709042] env[69328]: DEBUG oslo_concurrency.lockutils [req-075d41e8-3673-4be4-963a-a71d2156df46 req-92d46de8-e09e-42b1-b63b-256948ed5e28 service nova] Releasing lock "refresh_cache-f1be93b2-08db-41fe-87c4-f4e5f964cfa4" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 904.709196] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Acquiring lock "refresh_cache-a0952fdf-5570-4112-bc4d-e9f9cee1599c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.709300] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Acquired lock "refresh_cache-a0952fdf-5570-4112-bc4d-e9f9cee1599c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 904.709479] env[69328]: DEBUG nova.network.neutron [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 904.773643] env[69328]: DEBUG nova.compute.manager [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 904.949087] env[69328]: DEBUG oslo_vmware.api [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]523b6b94-3ee6-be3a-4464-0a2214b136f9, 'name': SearchDatastore_Task, 'duration_secs': 0.009758} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.949473] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 904.949764] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 904.950056] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.950241] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 904.950457] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 904.950755] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0d33f8c4-7983-4fc5-a492-49d6af18325d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.960068] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 904.960320] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 904.961059] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c35028f0-a08f-4af1-a84a-7a6c1b735c79 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.966713] env[69328]: DEBUG oslo_vmware.api [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 904.966713] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]526a8154-d881-1c03-d2a5-ba4512cd73c5" [ 904.966713] env[69328]: _type = "Task" [ 904.966713] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.975436] env[69328]: DEBUG oslo_vmware.api [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]526a8154-d881-1c03-d2a5-ba4512cd73c5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.070822] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.585s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 905.071033] env[69328]: INFO nova.compute.manager [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Migrating [ 905.078227] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.865s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 905.079777] env[69328]: INFO nova.compute.claims [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 905.187705] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 4c54c0dd-32f1-4d35-b770-3e1a540c54a7] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 905.241217] env[69328]: DEBUG nova.network.neutron [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 905.302468] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 905.376497] env[69328]: DEBUG nova.network.neutron [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Updating instance_info_cache with network_info: [{"id": "c74af0b7-ebfb-4563-9208-a18235899a6c", "address": "fa:16:3e:35:bb:fc", "network": {"id": "cc75e08f-f0f3-4b52-9b40-0de73f044554", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1326858830-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1393040bf5304571ae4b66d0a4ee7b6e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc74af0b7-eb", "ovs_interfaceid": "c74af0b7-ebfb-4563-9208-a18235899a6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.478322] env[69328]: DEBUG oslo_vmware.api [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]526a8154-d881-1c03-d2a5-ba4512cd73c5, 'name': SearchDatastore_Task, 'duration_secs': 0.013167} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.479293] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bcc72bf7-e8c4-4bad-a208-535d269d7c5c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.484468] env[69328]: DEBUG nova.compute.manager [req-f6e53414-3213-4a24-a616-fd56a5758684 req-dd943edc-8b81-48e1-8f6d-62b134fbf306 service nova] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Received event network-vif-plugged-c74af0b7-ebfb-4563-9208-a18235899a6c {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 905.484760] env[69328]: DEBUG oslo_concurrency.lockutils [req-f6e53414-3213-4a24-a616-fd56a5758684 req-dd943edc-8b81-48e1-8f6d-62b134fbf306 service nova] Acquiring lock "a0952fdf-5570-4112-bc4d-e9f9cee1599c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 905.485018] env[69328]: DEBUG oslo_concurrency.lockutils [req-f6e53414-3213-4a24-a616-fd56a5758684 req-dd943edc-8b81-48e1-8f6d-62b134fbf306 service nova] Lock "a0952fdf-5570-4112-bc4d-e9f9cee1599c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 905.485233] env[69328]: DEBUG oslo_concurrency.lockutils [req-f6e53414-3213-4a24-a616-fd56a5758684 req-dd943edc-8b81-48e1-8f6d-62b134fbf306 service nova] Lock "a0952fdf-5570-4112-bc4d-e9f9cee1599c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 905.485468] env[69328]: DEBUG nova.compute.manager [req-f6e53414-3213-4a24-a616-fd56a5758684 req-dd943edc-8b81-48e1-8f6d-62b134fbf306 service nova] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] No waiting events found dispatching network-vif-plugged-c74af0b7-ebfb-4563-9208-a18235899a6c {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 905.485658] env[69328]: WARNING nova.compute.manager [req-f6e53414-3213-4a24-a616-fd56a5758684 req-dd943edc-8b81-48e1-8f6d-62b134fbf306 service nova] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Received unexpected event network-vif-plugged-c74af0b7-ebfb-4563-9208-a18235899a6c for instance with vm_state building and task_state spawning. [ 905.485852] env[69328]: DEBUG nova.compute.manager [req-f6e53414-3213-4a24-a616-fd56a5758684 req-dd943edc-8b81-48e1-8f6d-62b134fbf306 service nova] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Received event network-changed-c74af0b7-ebfb-4563-9208-a18235899a6c {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 905.486085] env[69328]: DEBUG nova.compute.manager [req-f6e53414-3213-4a24-a616-fd56a5758684 req-dd943edc-8b81-48e1-8f6d-62b134fbf306 service nova] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Refreshing instance network info cache due to event network-changed-c74af0b7-ebfb-4563-9208-a18235899a6c. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 905.486279] env[69328]: DEBUG oslo_concurrency.lockutils [req-f6e53414-3213-4a24-a616-fd56a5758684 req-dd943edc-8b81-48e1-8f6d-62b134fbf306 service nova] Acquiring lock "refresh_cache-a0952fdf-5570-4112-bc4d-e9f9cee1599c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.489924] env[69328]: DEBUG oslo_vmware.api [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 905.489924] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]527db304-dee1-1ca9-8074-9d626750a9ce" [ 905.489924] env[69328]: _type = "Task" [ 905.489924] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.499521] env[69328]: DEBUG oslo_vmware.api [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]527db304-dee1-1ca9-8074-9d626750a9ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.593952] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "refresh_cache-fd72bae3-cb72-48d0-a0df-9ea3a770a86c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.594145] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquired lock "refresh_cache-fd72bae3-cb72-48d0-a0df-9ea3a770a86c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 905.594809] env[69328]: DEBUG nova.network.neutron [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 905.690643] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 146a3eef-0971-4f6e-bd24-58b38a1de0ed] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 905.880036] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Releasing lock "refresh_cache-a0952fdf-5570-4112-bc4d-e9f9cee1599c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 905.880036] env[69328]: DEBUG nova.compute.manager [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Instance network_info: |[{"id": "c74af0b7-ebfb-4563-9208-a18235899a6c", "address": "fa:16:3e:35:bb:fc", "network": {"id": 
"cc75e08f-f0f3-4b52-9b40-0de73f044554", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1326858830-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1393040bf5304571ae4b66d0a4ee7b6e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc74af0b7-eb", "ovs_interfaceid": "c74af0b7-ebfb-4563-9208-a18235899a6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 905.880284] env[69328]: DEBUG oslo_concurrency.lockutils [req-f6e53414-3213-4a24-a616-fd56a5758684 req-dd943edc-8b81-48e1-8f6d-62b134fbf306 service nova] Acquired lock "refresh_cache-a0952fdf-5570-4112-bc4d-e9f9cee1599c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 905.880409] env[69328]: DEBUG nova.network.neutron [req-f6e53414-3213-4a24-a616-fd56a5758684 req-dd943edc-8b81-48e1-8f6d-62b134fbf306 service nova] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Refreshing network info cache for port c74af0b7-ebfb-4563-9208-a18235899a6c {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 905.881913] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:35:bb:fc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '10b81051-1eb1-406b-888c-4548c470c77e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c74af0b7-ebfb-4563-9208-a18235899a6c', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 905.891502] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Creating folder: Project (1393040bf5304571ae4b66d0a4ee7b6e). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 905.894784] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4988522d-3b0b-4254-9851-e5fe38a59473 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.907623] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Created folder: Project (1393040bf5304571ae4b66d0a4ee7b6e) in parent group-v653649. 
[ 905.907837] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Creating folder: Instances. Parent ref: group-v653851. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 905.908089] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a7229960-2156-4abf-8919-eba3c88c1dad {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.918029] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Created folder: Instances in parent group-v653851. [ 905.920682] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 905.920682] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 905.920682] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d1a36bbb-ce43-45b7-b89c-47e0277f000c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.942492] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 905.942492] env[69328]: value = "task-3273506" [ 905.942492] env[69328]: _type = "Task" [ 905.942492] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.953025] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273506, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.999162] env[69328]: DEBUG oslo_vmware.api [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]527db304-dee1-1ca9-8074-9d626750a9ce, 'name': SearchDatastore_Task, 'duration_secs': 0.012121} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.999477] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 905.999821] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] f1be93b2-08db-41fe-87c4-f4e5f964cfa4/f1be93b2-08db-41fe-87c4-f4e5f964cfa4.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 906.000100] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cea46218-c86b-43b1-a590-ae2baaea3fc7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.006847] env[69328]: DEBUG oslo_vmware.api [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 906.006847] env[69328]: value = "task-3273507" [ 906.006847] env[69328]: _type = "Task" [ 906.006847] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.017448] env[69328]: DEBUG oslo_vmware.api [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3273507, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.144729] env[69328]: DEBUG nova.network.neutron [req-f6e53414-3213-4a24-a616-fd56a5758684 req-dd943edc-8b81-48e1-8f6d-62b134fbf306 service nova] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Updated VIF entry in instance network info cache for port c74af0b7-ebfb-4563-9208-a18235899a6c. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 906.145279] env[69328]: DEBUG nova.network.neutron [req-f6e53414-3213-4a24-a616-fd56a5758684 req-dd943edc-8b81-48e1-8f6d-62b134fbf306 service nova] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Updating instance_info_cache with network_info: [{"id": "c74af0b7-ebfb-4563-9208-a18235899a6c", "address": "fa:16:3e:35:bb:fc", "network": {"id": "cc75e08f-f0f3-4b52-9b40-0de73f044554", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1326858830-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1393040bf5304571ae4b66d0a4ee7b6e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc74af0b7-eb", "ovs_interfaceid": "c74af0b7-ebfb-4563-9208-a18235899a6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 906.197545] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 1e7e9e6e-c084-480c-8653-8441c13d7514] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 906.351558] env[69328]: DEBUG nova.network.neutron [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Updating instance_info_cache with network_info: [{"id": "eebd5d04-278d-4e22-9e5d-df5ae37877cf", "address": "fa:16:3e:d8:d9:39", "network": {"id": "e2ab6957-2c9d-4b95-91bd-5a62d9a284ba", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-967470666-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75d5853e3c724d02bacfa75173e38ab3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeebd5d04-27", "ovs_interfaceid": "eebd5d04-278d-4e22-9e5d-df5ae37877cf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 906.458594] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273506, 
'name': CreateVM_Task, 'duration_secs': 0.354682} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.458780] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 906.459556] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.459721] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 906.460079] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 906.460354] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e0f54e7-9b84-48ed-be39-c848d5f1afa4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.467319] env[69328]: DEBUG oslo_vmware.api [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for the task: (returnval){ [ 906.467319] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d9ee9f-5958-4dad-5d02-84fe7c5b2577" [ 906.467319] env[69328]: _type = "Task" [ 906.467319] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.476062] env[69328]: DEBUG oslo_vmware.api [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d9ee9f-5958-4dad-5d02-84fe7c5b2577, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.506049] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15be58d8-a83f-4953-b2b9-3917682300a9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.520623] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ead7883-b732-4dee-acc2-743cddad5b5e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.524795] env[69328]: DEBUG oslo_vmware.api [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3273507, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.555170] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-070203d8-706d-4fae-8853-22a7b3a2adc9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.564279] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15a0ad28-1d30-4dcd-b90b-7ef86591f04d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.578807] env[69328]: DEBUG nova.compute.provider_tree [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 906.650136] env[69328]: DEBUG oslo_concurrency.lockutils [req-f6e53414-3213-4a24-a616-fd56a5758684 req-dd943edc-8b81-48e1-8f6d-62b134fbf306 service nova] Releasing lock "refresh_cache-a0952fdf-5570-4112-bc4d-e9f9cee1599c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 906.701890] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: e1eec0ce-8df7-402a-b628-5dfdc11949e7] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 906.856444] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Releasing lock "refresh_cache-fd72bae3-cb72-48d0-a0df-9ea3a770a86c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 906.979117] env[69328]: DEBUG oslo_vmware.api [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d9ee9f-5958-4dad-5d02-84fe7c5b2577, 'name': SearchDatastore_Task, 'duration_secs': 0.058191} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.980105] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 906.980105] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 906.980105] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.980105] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 906.980329] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 906.980535] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4c46466b-48b1-46c8-843e-fc5b1184badd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.995200] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 906.995403] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 906.996186] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54bb47d2-ba08-46cb-a501-8a871367c653 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.001635] env[69328]: DEBUG oslo_vmware.api [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for the task: (returnval){ [ 907.001635] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52050149-41f2-4dd9-cee7-45a231dd73d3" [ 907.001635] env[69328]: _type = "Task" [ 907.001635] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.010042] env[69328]: DEBUG oslo_vmware.api [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52050149-41f2-4dd9-cee7-45a231dd73d3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.017059] env[69328]: DEBUG oslo_vmware.api [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3273507, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.082288] env[69328]: DEBUG nova.scheduler.client.report [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 907.205377] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 8e3a73c1-b622-47f4-99af-71b6dba7c09b] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 907.512137] env[69328]: DEBUG oslo_vmware.api [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52050149-41f2-4dd9-cee7-45a231dd73d3, 'name': SearchDatastore_Task, 'duration_secs': 0.029842} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.516088] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe437b73-6fd1-4231-97c3-14c3c031746e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.523152] env[69328]: DEBUG oslo_vmware.api [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3273507, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.524448] env[69328]: DEBUG oslo_vmware.api [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for the task: (returnval){ [ 907.524448] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f57cd7-c20f-ce08-de54-4ba0fabc72fb" [ 907.524448] env[69328]: _type = "Task" [ 907.524448] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.532088] env[69328]: DEBUG oslo_vmware.api [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f57cd7-c20f-ce08-de54-4ba0fabc72fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.587324] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.509s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 907.587893] env[69328]: DEBUG nova.compute.manager [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 907.590797] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f90bad43-d8d9-49f5-9ed7-78c713b3551a tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.198s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 907.591009] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f90bad43-d8d9-49f5-9ed7-78c713b3551a tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 907.593470] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.057s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 907.593694] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 907.596169] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.856s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 907.597607] env[69328]: INFO nova.compute.claims [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 907.628436] env[69328]: INFO nova.scheduler.client.report [None req-f90bad43-d8d9-49f5-9ed7-78c713b3551a tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Deleted allocations for instance e5a2de79-cfbc-4d9c-8b58-5aa819657978 [ 907.709059] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: b7d6b4ef-fb86-4542-8abf-c8cb1cd71d25] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 908.021786] env[69328]: DEBUG oslo_vmware.api [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3273507, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.632297} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.022081] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] f1be93b2-08db-41fe-87c4-f4e5f964cfa4/f1be93b2-08db-41fe-87c4-f4e5f964cfa4.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 908.022299] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 908.022561] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-494c6431-7158-4dda-a1f6-95833fc0b2f7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.034318] env[69328]: DEBUG oslo_vmware.api [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f57cd7-c20f-ce08-de54-4ba0fabc72fb, 'name': SearchDatastore_Task, 'duration_secs': 0.011076} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.034964] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 908.035241] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] a0952fdf-5570-4112-bc4d-e9f9cee1599c/a0952fdf-5570-4112-bc4d-e9f9cee1599c.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 908.035548] env[69328]: DEBUG oslo_vmware.api [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 908.035548] env[69328]: value = "task-3273508" [ 908.035548] env[69328]: _type = "Task" [ 908.035548] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.035753] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-57c347c6-1502-4c94-8481-4fe48322870e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.045828] env[69328]: DEBUG oslo_vmware.api [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3273508, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.047498] env[69328]: DEBUG oslo_vmware.api [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for the task: (returnval){ [ 908.047498] env[69328]: value = "task-3273509" [ 908.047498] env[69328]: _type = "Task" [ 908.047498] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.055261] env[69328]: DEBUG oslo_vmware.api [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273509, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.102414] env[69328]: DEBUG nova.compute.utils [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 908.108614] env[69328]: DEBUG nova.compute.manager [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 908.108710] env[69328]: DEBUG nova.network.neutron [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 908.111147] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c22f8390-4178-406e-83cc-1c471e233ec5 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "6ccd0715-0903-4fed-bf80-240f386e4ad8" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 57.923s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 908.111901] env[69328]: DEBUG oslo_concurrency.lockutils [None req-887c48b4-8f6f-4201-aeae-a5cb8814a584 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "6ccd0715-0903-4fed-bf80-240f386e4ad8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 36.968s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 908.112129] env[69328]: DEBUG oslo_concurrency.lockutils [None req-887c48b4-8f6f-4201-aeae-a5cb8814a584 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "6ccd0715-0903-4fed-bf80-240f386e4ad8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 908.112419] env[69328]: DEBUG oslo_concurrency.lockutils [None req-887c48b4-8f6f-4201-aeae-a5cb8814a584 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "6ccd0715-0903-4fed-bf80-240f386e4ad8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 908.112651] env[69328]: DEBUG oslo_concurrency.lockutils [None req-887c48b4-8f6f-4201-aeae-a5cb8814a584 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "6ccd0715-0903-4fed-bf80-240f386e4ad8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 908.115175] env[69328]: INFO nova.compute.manager [None req-887c48b4-8f6f-4201-aeae-a5cb8814a584 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Terminating instance [ 908.139936] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f90bad43-d8d9-49f5-9ed7-78c713b3551a tempest-ServersListShow2100Test-935049087 tempest-ServersListShow2100Test-935049087-project-member] Lock "e5a2de79-cfbc-4d9c-8b58-5aa819657978" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.030s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 908.169582] env[69328]: DEBUG nova.policy [None 
req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '435c64c503c043a29f90396ad3b070d9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '87581f423dc64e4fb9fe1d51ebc68597', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 908.212811] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: b7409a67-c140-436f-9c4e-27dae259f648] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 908.374509] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e786a3d-cb91-4a71-bb97-0892b79c3c26 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.397114] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Updating instance 'fd72bae3-cb72-48d0-a0df-9ea3a770a86c' progress to 0 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 908.548844] env[69328]: DEBUG oslo_vmware.api [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3273508, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07509} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.557021] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 908.557021] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7324b3d2-5a64-4bcb-9fa5-291173bd9931 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.565546] env[69328]: DEBUG oslo_vmware.api [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273509, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.588046] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Reconfiguring VM instance instance-00000044 to attach disk [datastore2] f1be93b2-08db-41fe-87c4-f4e5f964cfa4/f1be93b2-08db-41fe-87c4-f4e5f964cfa4.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 908.588046] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-26c9e558-4b15-4092-a3c7-ce4c4da39f86 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.606360] env[69328]: DEBUG oslo_vmware.api [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 908.606360] env[69328]: value = "task-3273510" [ 908.606360] env[69328]: _type = "Task" [ 908.606360] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.610681] env[69328]: DEBUG nova.compute.manager [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 908.617692] env[69328]: DEBUG nova.network.neutron [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Successfully created port: 6fa9c0fb-f285-4d44-8824-09041fd2f8f6 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 908.620293] env[69328]: DEBUG nova.compute.manager [None req-887c48b4-8f6f-4201-aeae-a5cb8814a584 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 908.620594] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-887c48b4-8f6f-4201-aeae-a5cb8814a584 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 908.624642] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-93d4507b-1e02-4a37-ab38-0c61ace8929b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.628323] env[69328]: DEBUG oslo_vmware.api [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3273510, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.636316] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-359919fe-6b33-4f40-9ff7-b0bde28bb7c3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.677940] env[69328]: WARNING nova.virt.vmwareapi.vmops [None req-887c48b4-8f6f-4201-aeae-a5cb8814a584 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6ccd0715-0903-4fed-bf80-240f386e4ad8 could not be found. [ 908.678259] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-887c48b4-8f6f-4201-aeae-a5cb8814a584 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 908.678556] env[69328]: INFO nova.compute.manager [None req-887c48b4-8f6f-4201-aeae-a5cb8814a584 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Took 0.06 seconds to destroy the instance on the hypervisor. [ 908.678832] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-887c48b4-8f6f-4201-aeae-a5cb8814a584 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 908.682819] env[69328]: DEBUG nova.compute.manager [-] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 908.683047] env[69328]: DEBUG nova.network.neutron [-] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 908.716080] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: c3673531-9167-4d33-b8ce-d6afa5e589bc] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 908.903685] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 908.907124] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2cf9dfab-e455-4044-b211-af2c88b64b14 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.914219] env[69328]: DEBUG oslo_vmware.api [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 908.914219] env[69328]: value = "task-3273511" [ 908.914219] env[69328]: _type = "Task" [ 
908.914219] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.932220] env[69328]: DEBUG oslo_vmware.api [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273511, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.046772] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-435d5c3e-7f78-485e-b5e2-6328a0d2cfc6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.061618] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51968257-e9a8-49d3-98b6-f96364914a59 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.065661] env[69328]: DEBUG oslo_vmware.api [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273509, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.538106} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.065661] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] a0952fdf-5570-4112-bc4d-e9f9cee1599c/a0952fdf-5570-4112-bc4d-e9f9cee1599c.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 909.065661] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 909.066267] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c1fd7037-83f3-4e43-b0cf-d5592335df27 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.095961] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f757008f-b295-4ffa-9642-0b50fc047889 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.100029] env[69328]: DEBUG oslo_vmware.api [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for the task: (returnval){ [ 909.100029] env[69328]: value = "task-3273512" [ 909.100029] env[69328]: _type = "Task" [ 909.100029] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.107785] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a3a8f16-db93-4c1a-8a0a-609c44cd7349 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.115366] env[69328]: DEBUG oslo_vmware.api [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273512, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.131833] env[69328]: DEBUG nova.compute.provider_tree [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 909.136633] env[69328]: DEBUG oslo_vmware.api [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3273510, 'name': ReconfigVM_Task, 'duration_secs': 0.286842} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.136845] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Reconfigured VM instance instance-00000044 to attach disk [datastore2] f1be93b2-08db-41fe-87c4-f4e5f964cfa4/f1be93b2-08db-41fe-87c4-f4e5f964cfa4.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 909.137478] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-93ae18cc-c375-4f98-867f-b52657bed847 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.144257] env[69328]: DEBUG oslo_vmware.api [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 909.144257] env[69328]: value = "task-3273513" [ 909.144257] env[69328]: _type = "Task" [ 909.144257] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.153156] env[69328]: DEBUG oslo_vmware.api [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3273513, 'name': Rename_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.220189] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: bbbfb48d-b474-4a6e-9078-336f23d2c343] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 909.424780] env[69328]: DEBUG oslo_vmware.api [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273511, 'name': PowerOffVM_Task, 'duration_secs': 0.193021} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.425087] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 909.425278] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Updating instance 'fd72bae3-cb72-48d0-a0df-9ea3a770a86c' progress to 17 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 909.472699] env[69328]: DEBUG nova.network.neutron [-] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.610630] env[69328]: DEBUG oslo_vmware.api [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273512, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072344} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.610911] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 909.611774] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fb0a429-10e8-49a8-baa8-ad914f15cb56 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.635767] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Reconfiguring VM instance instance-00000045 to attach disk [datastore2] a0952fdf-5570-4112-bc4d-e9f9cee1599c/a0952fdf-5570-4112-bc4d-e9f9cee1599c.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 909.637010] env[69328]: DEBUG nova.compute.manager [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 909.639137] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8f857365-310d-44ed-b125-11d082f8baf3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.654522] env[69328]: DEBUG nova.scheduler.client.report [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 909.667954] env[69328]: DEBUG oslo_vmware.api [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3273513, 'name': Rename_Task, 'duration_secs': 0.148533} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.670995] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 909.671332] env[69328]: DEBUG oslo_vmware.api [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for the task: (returnval){ [ 909.671332] env[69328]: value = "task-3273514" [ 909.671332] env[69328]: _type = "Task" [ 909.671332] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.672248] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e535806e-fc1c-4be3-b4ca-ef89c7a89596 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.680217] env[69328]: DEBUG nova.virt.hardware [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 909.680437] env[69328]: DEBUG nova.virt.hardware [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 909.680591] env[69328]: DEBUG nova.virt.hardware [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 909.680768] env[69328]: DEBUG nova.virt.hardware [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 909.680910] env[69328]: DEBUG nova.virt.hardware [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 909.681062] env[69328]: DEBUG nova.virt.hardware [None 
req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 909.681268] env[69328]: DEBUG nova.virt.hardware [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 909.681423] env[69328]: DEBUG nova.virt.hardware [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 909.681648] env[69328]: DEBUG nova.virt.hardware [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 909.681842] env[69328]: DEBUG nova.virt.hardware [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 909.682101] env[69328]: DEBUG nova.virt.hardware [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 909.683403] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d90f1812-8714-4cce-8651-98472ce2816d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.687845] env[69328]: DEBUG oslo_vmware.api [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 909.687845] env[69328]: value = "task-3273515" [ 909.687845] env[69328]: _type = "Task" [ 909.687845] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.693357] env[69328]: DEBUG oslo_vmware.api [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273514, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.697445] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21100d45-dc1f-4e8c-9bc0-7f8e743164f2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.706405] env[69328]: DEBUG oslo_vmware.api [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3273515, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.724160] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 84baf472-6eb5-4c92-98eb-e35c14bca4e2] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 909.932728] env[69328]: DEBUG nova.virt.hardware [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:34:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 909.932728] env[69328]: DEBUG nova.virt.hardware [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 909.932728] env[69328]: DEBUG nova.virt.hardware [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 909.932728] env[69328]: DEBUG nova.virt.hardware [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 909.933058] env[69328]: DEBUG nova.virt.hardware [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 909.933058] env[69328]: DEBUG nova.virt.hardware [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 909.933202] 
env[69328]: DEBUG nova.virt.hardware [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 909.933359] env[69328]: DEBUG nova.virt.hardware [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 909.933523] env[69328]: DEBUG nova.virt.hardware [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 909.933763] env[69328]: DEBUG nova.virt.hardware [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 909.933867] env[69328]: DEBUG nova.virt.hardware [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 909.938938] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-84fbe9c0-72db-4afe-a163-95bc9b8fc053 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.955994] env[69328]: DEBUG oslo_vmware.api [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 909.955994] env[69328]: value = "task-3273516" [ 909.955994] env[69328]: _type = "Task" [ 909.955994] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.985235] env[69328]: INFO nova.compute.manager [-] [instance: 6ccd0715-0903-4fed-bf80-240f386e4ad8] Took 1.30 seconds to deallocate network for instance. [ 909.985709] env[69328]: DEBUG oslo_vmware.api [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273516, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.162936] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.566s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 910.163047] env[69328]: DEBUG nova.compute.manager [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 910.166054] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.651s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 910.167603] env[69328]: INFO nova.compute.claims [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 910.185403] env[69328]: DEBUG oslo_vmware.api [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273514, 'name': ReconfigVM_Task, 'duration_secs': 0.272766} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.185690] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Reconfigured VM instance instance-00000045 to attach disk [datastore2] a0952fdf-5570-4112-bc4d-e9f9cee1599c/a0952fdf-5570-4112-bc4d-e9f9cee1599c.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 910.186368] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dff4b638-d9f6-49c1-8ec1-0aff8ff80e66 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.192861] env[69328]: DEBUG oslo_vmware.api [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for the task: (returnval){ [ 910.192861] env[69328]: value = "task-3273517" [ 910.192861] env[69328]: _type = "Task" [ 910.192861] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.204953] env[69328]: DEBUG oslo_vmware.api [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273517, 'name': Rename_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.209368] env[69328]: DEBUG oslo_vmware.api [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3273515, 'name': PowerOnVM_Task, 'duration_secs': 0.473759} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.209675] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 910.209892] env[69328]: INFO nova.compute.manager [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Took 9.18 seconds to spawn the instance on the hypervisor. [ 910.210083] env[69328]: DEBUG nova.compute.manager [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 910.210947] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ca2def2-1aca-412b-bcd7-360238564fc7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.217841] env[69328]: DEBUG nova.compute.manager [req-484479c3-2b04-4d1b-8e2a-aa7599a3f854 req-d7a51e1f-8228-4001-83f1-9ccb780ffb05 service nova] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Received event network-vif-plugged-6fa9c0fb-f285-4d44-8824-09041fd2f8f6 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 910.218098] env[69328]: DEBUG oslo_concurrency.lockutils [req-484479c3-2b04-4d1b-8e2a-aa7599a3f854 req-d7a51e1f-8228-4001-83f1-9ccb780ffb05 service nova] Acquiring lock "3ba646e8-a5c8-4917-a1c4-32b37affb598-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 910.218304] env[69328]: DEBUG oslo_concurrency.lockutils [req-484479c3-2b04-4d1b-8e2a-aa7599a3f854 req-d7a51e1f-8228-4001-83f1-9ccb780ffb05 service nova] Lock "3ba646e8-a5c8-4917-a1c4-32b37affb598-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 910.218501] env[69328]: DEBUG oslo_concurrency.lockutils [req-484479c3-2b04-4d1b-8e2a-aa7599a3f854 req-d7a51e1f-8228-4001-83f1-9ccb780ffb05 service nova] Lock "3ba646e8-a5c8-4917-a1c4-32b37affb598-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 910.218642] env[69328]: DEBUG nova.compute.manager [req-484479c3-2b04-4d1b-8e2a-aa7599a3f854 req-d7a51e1f-8228-4001-83f1-9ccb780ffb05 service nova] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] No waiting events found dispatching network-vif-plugged-6fa9c0fb-f285-4d44-8824-09041fd2f8f6 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 910.218806] env[69328]: WARNING nova.compute.manager [req-484479c3-2b04-4d1b-8e2a-aa7599a3f854 req-d7a51e1f-8228-4001-83f1-9ccb780ffb05 service nova] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Received unexpected event network-vif-plugged-6fa9c0fb-f285-4d44-8824-09041fd2f8f6 for instance with vm_state building and task_state spawning. [ 910.227785] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 5b0e8bef-dcfc-4c5e-89d2-aa1748050d29] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 910.330807] env[69328]: DEBUG nova.network.neutron [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Successfully updated port: 6fa9c0fb-f285-4d44-8824-09041fd2f8f6 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 910.411137] env[69328]: DEBUG oslo_vmware.rw_handles [None req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52127d3e-94d4-f421-a530-4eb679c4bb49/disk-0.vmdk. {{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 910.412136] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0579051-dd83-4e0b-a6d7-459e9b223e08 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.418769] env[69328]: DEBUG oslo_vmware.rw_handles [None req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52127d3e-94d4-f421-a530-4eb679c4bb49/disk-0.vmdk is in state: ready. {{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 910.418769] env[69328]: ERROR oslo_vmware.rw_handles [None req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52127d3e-94d4-f421-a530-4eb679c4bb49/disk-0.vmdk due to incomplete transfer. 
[ 910.418976] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-39789c83-4c56-4628-a67c-0f87b26e4841 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.424991] env[69328]: DEBUG oslo_vmware.rw_handles [None req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52127d3e-94d4-f421-a530-4eb679c4bb49/disk-0.vmdk. {{(pid=69328) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 910.425201] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Uploaded image 73e0a420-6bf5-4ed4-835c-91d5b10628ee to the Glance image server {{(pid=69328) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 910.427400] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Destroying the VM {{(pid=69328) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 910.427633] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-cf2ec308-77cf-4775-bd7b-c24f75053147 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.433170] env[69328]: DEBUG oslo_vmware.api [None req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 910.433170] env[69328]: value = "task-3273518" [ 910.433170] env[69328]: _type = "Task" [ 910.433170] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.442306] env[69328]: DEBUG oslo_vmware.api [None req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273518, 'name': Destroy_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.466701] env[69328]: DEBUG oslo_vmware.api [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273516, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.672627] env[69328]: DEBUG nova.compute.utils [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 910.676041] env[69328]: DEBUG nova.compute.manager [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 910.676174] env[69328]: DEBUG nova.network.neutron [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 910.703305] env[69328]: DEBUG oslo_vmware.api [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273517, 'name': Rename_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.724288] env[69328]: DEBUG nova.policy [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a8fbe2a134194d29af48ac8e4986d0cb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd86de4d5055642aa86a29c6768e3db46', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 910.730817] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 15a8de08-4d20-4329-9867-53e5dff82878] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 910.738543] env[69328]: INFO nova.compute.manager [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Took 59.08 seconds to build instance. [ 910.833173] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "refresh_cache-3ba646e8-a5c8-4917-a1c4-32b37affb598" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 910.833398] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquired lock "refresh_cache-3ba646e8-a5c8-4917-a1c4-32b37affb598" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 910.833594] env[69328]: DEBUG nova.network.neutron [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 910.943322] env[69328]: DEBUG oslo_vmware.api [None req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273518, 'name': Destroy_Task, 'duration_secs': 0.332209} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.943586] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Destroyed the VM [ 910.943842] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Deleting Snapshot of the VM instance {{(pid=69328) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 910.944158] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-aed7dce3-817f-49b6-b0ee-d5934bdc9450 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.951112] env[69328]: DEBUG oslo_vmware.api [None req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 910.951112] env[69328]: value = "task-3273519" [ 910.951112] env[69328]: _type = "Task" [ 910.951112] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.959826] env[69328]: DEBUG oslo_vmware.api [None req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273519, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.968267] env[69328]: DEBUG oslo_vmware.api [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273516, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.018217] env[69328]: DEBUG oslo_concurrency.lockutils [None req-887c48b4-8f6f-4201-aeae-a5cb8814a584 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "6ccd0715-0903-4fed-bf80-240f386e4ad8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.906s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.144485] env[69328]: DEBUG nova.network.neutron [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Successfully created port: e4e1721b-99c8-403d-b056-e89c1a106a41 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 911.180021] env[69328]: DEBUG nova.compute.manager [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Start building block device mappings for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 911.208933] env[69328]: DEBUG oslo_vmware.api [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273517, 'name': Rename_Task, 'duration_secs': 0.893188} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.210547] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 911.212816] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7526a07e-fae8-4f5a-a304-59c6bf5087f6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.221226] env[69328]: DEBUG oslo_vmware.api [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for the task: (returnval){ [ 911.221226] env[69328]: value = "task-3273520" [ 911.221226] env[69328]: _type = "Task" [ 911.221226] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.233217] env[69328]: DEBUG oslo_vmware.api [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273520, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.236108] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 9753734d-90f0-4661-8029-ec312e88eb60] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 911.241129] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cc90b55e-708b-49cb-9387-9b6379fc23b8 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "f1be93b2-08db-41fe-87c4-f4e5f964cfa4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.590s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.256736] env[69328]: DEBUG oslo_concurrency.lockutils [None req-27776706-5241-4665-a52a-b96bfa6a670b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "d017d08e-5f9e-4d05-8914-3320d4c87c9b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 911.256736] env[69328]: DEBUG oslo_concurrency.lockutils [None req-27776706-5241-4665-a52a-b96bfa6a670b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "d017d08e-5f9e-4d05-8914-3320d4c87c9b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 911.256736] env[69328]: DEBUG oslo_concurrency.lockutils [None req-27776706-5241-4665-a52a-b96bfa6a670b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "d017d08e-5f9e-4d05-8914-3320d4c87c9b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 911.257116] env[69328]: DEBUG oslo_concurrency.lockutils [None req-27776706-5241-4665-a52a-b96bfa6a670b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "d017d08e-5f9e-4d05-8914-3320d4c87c9b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 911.257480] env[69328]: DEBUG oslo_concurrency.lockutils [None req-27776706-5241-4665-a52a-b96bfa6a670b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "d017d08e-5f9e-4d05-8914-3320d4c87c9b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.265041] env[69328]: INFO nova.compute.manager [None req-27776706-5241-4665-a52a-b96bfa6a670b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Terminating instance [ 911.394842] env[69328]: DEBUG nova.network.neutron [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] 
[instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 911.460900] env[69328]: DEBUG oslo_vmware.api [None req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273519, 'name': RemoveSnapshot_Task, 'duration_secs': 0.454108} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.465460] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Deleted Snapshot of the VM instance {{(pid=69328) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 911.465778] env[69328]: INFO nova.compute.manager [None req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Took 12.09 seconds to snapshot the instance on the hypervisor. [ 911.476852] env[69328]: DEBUG oslo_vmware.api [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273516, 'name': ReconfigVM_Task, 'duration_secs': 1.113323} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.477214] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Updating instance 'fd72bae3-cb72-48d0-a0df-9ea3a770a86c' progress to 33 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 911.610908] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cdde656-e7fe-473f-8aaf-2231633c0745 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.619286] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d3a315e-d1ed-46db-b953-381cea7caa6b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.655531] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-389b5ae3-56a8-4e80-952a-5da8ce55f03a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.662690] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-683e3696-dc85-4206-8e9a-acdcd7d10105 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.676638] env[69328]: DEBUG nova.compute.provider_tree [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 911.712306] env[69328]: DEBUG 
nova.network.neutron [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Updating instance_info_cache with network_info: [{"id": "6fa9c0fb-f285-4d44-8824-09041fd2f8f6", "address": "fa:16:3e:96:60:e7", "network": {"id": "ce3bec03-01f9-4ac9-80e4-bfb944c0bb66", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-545997027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87581f423dc64e4fb9fe1d51ebc68597", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6fa9c0fb-f2", "ovs_interfaceid": "6fa9c0fb-f285-4d44-8824-09041fd2f8f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 911.734520] env[69328]: DEBUG oslo_vmware.api [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273520, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.741053] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: e92953f4-b634-4ef9-a5ad-63a886cfa007] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 911.775744] env[69328]: DEBUG nova.compute.manager [None req-27776706-5241-4665-a52a-b96bfa6a670b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 911.775970] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-27776706-5241-4665-a52a-b96bfa6a670b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 911.777911] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c29eb3a-44d2-43de-b476-7ddf0619a461 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.786737] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-27776706-5241-4665-a52a-b96bfa6a670b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 911.786804] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1ffeb44a-69e3-471c-9786-38461010bed1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.850463] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-27776706-5241-4665-a52a-b96bfa6a670b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 911.850718] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-27776706-5241-4665-a52a-b96bfa6a670b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 911.850904] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-27776706-5241-4665-a52a-b96bfa6a670b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Deleting the datastore file [datastore1] d017d08e-5f9e-4d05-8914-3320d4c87c9b {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 911.851202] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8c50cf08-df49-43ba-8980-4fafcab1f7cf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.858275] env[69328]: DEBUG nova.compute.manager [req-0bd64db9-d811-4176-9cd0-292ef934a4d2 req-bff87463-463c-4ae9-81a8-7181c33efd03 service nova] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Received event network-changed-1018560a-13d7-4d01-8fc4-03d0b9beab90 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 911.858502] env[69328]: DEBUG nova.compute.manager [req-0bd64db9-d811-4176-9cd0-292ef934a4d2 req-bff87463-463c-4ae9-81a8-7181c33efd03 service nova] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Refreshing instance network info cache due to event network-changed-1018560a-13d7-4d01-8fc4-03d0b9beab90. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 911.858759] env[69328]: DEBUG oslo_concurrency.lockutils [req-0bd64db9-d811-4176-9cd0-292ef934a4d2 req-bff87463-463c-4ae9-81a8-7181c33efd03 service nova] Acquiring lock "refresh_cache-f1be93b2-08db-41fe-87c4-f4e5f964cfa4" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.860745] env[69328]: DEBUG oslo_concurrency.lockutils [req-0bd64db9-d811-4176-9cd0-292ef934a4d2 req-bff87463-463c-4ae9-81a8-7181c33efd03 service nova] Acquired lock "refresh_cache-f1be93b2-08db-41fe-87c4-f4e5f964cfa4" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 911.860745] env[69328]: DEBUG nova.network.neutron [req-0bd64db9-d811-4176-9cd0-292ef934a4d2 req-bff87463-463c-4ae9-81a8-7181c33efd03 service nova] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Refreshing network info cache for port 1018560a-13d7-4d01-8fc4-03d0b9beab90 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 911.862557] env[69328]: DEBUG oslo_vmware.api [None req-27776706-5241-4665-a52a-b96bfa6a670b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 911.862557] env[69328]: value = "task-3273522" [ 911.862557] env[69328]: _type = "Task" [ 911.862557] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.873938] env[69328]: DEBUG oslo_vmware.api [None req-27776706-5241-4665-a52a-b96bfa6a670b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273522, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.905496] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "b21ff3c9-d53a-4065-a271-682c2f1b895d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 911.905780] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "b21ff3c9-d53a-4065-a271-682c2f1b895d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 911.975441] env[69328]: DEBUG nova.compute.manager [None req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Instance disappeared during snapshot {{(pid=69328) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 911.987026] env[69328]: DEBUG nova.virt.hardware [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 911.987026] env[69328]: DEBUG nova.virt.hardware [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 911.987026] env[69328]: DEBUG nova.virt.hardware [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 911.987026] env[69328]: DEBUG nova.virt.hardware [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 911.987026] env[69328]: DEBUG nova.virt.hardware [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 911.987026] env[69328]: DEBUG 
nova.virt.hardware [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 911.989737] env[69328]: DEBUG nova.virt.hardware [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 911.989806] env[69328]: DEBUG nova.virt.hardware [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 911.990013] env[69328]: DEBUG nova.virt.hardware [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 911.990182] env[69328]: DEBUG nova.virt.hardware [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 911.990375] env[69328]: DEBUG nova.virt.hardware [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 911.995841] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Reconfiguring VM instance instance-0000003e to detach disk 2000 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 911.996879] env[69328]: DEBUG nova.compute.manager [None req-e6d81449-25cd-45e1-8e17-bfe7322d4637 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Image not found during clean up 73e0a420-6bf5-4ed4-835c-91d5b10628ee {{(pid=69328) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4601}} [ 911.998710] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b323174a-5759-4bc3-863c-a0e97126f3c9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.019104] env[69328]: DEBUG oslo_vmware.api [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 912.019104] env[69328]: value = "task-3273523" [ 912.019104] env[69328]: _type = "Task" [ 912.019104] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.028804] env[69328]: DEBUG oslo_vmware.api [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273523, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.179357] env[69328]: DEBUG nova.scheduler.client.report [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 912.189516] env[69328]: DEBUG nova.compute.manager [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 912.213742] env[69328]: DEBUG nova.virt.hardware [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 912.214043] env[69328]: DEBUG nova.virt.hardware [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 912.214230] env[69328]: DEBUG nova.virt.hardware [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 912.214439] env[69328]: DEBUG nova.virt.hardware [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 912.214607] 
env[69328]: DEBUG nova.virt.hardware [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 912.214778] env[69328]: DEBUG nova.virt.hardware [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 912.214993] env[69328]: DEBUG nova.virt.hardware [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 912.215171] env[69328]: DEBUG nova.virt.hardware [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 912.215339] env[69328]: DEBUG nova.virt.hardware [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 912.215500] env[69328]: DEBUG nova.virt.hardware [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 912.215673] env[69328]: DEBUG nova.virt.hardware [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 912.216391] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Releasing lock "refresh_cache-3ba646e8-a5c8-4917-a1c4-32b37affb598" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 912.216680] env[69328]: DEBUG nova.compute.manager [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Instance network_info: |[{"id": "6fa9c0fb-f285-4d44-8824-09041fd2f8f6", "address": "fa:16:3e:96:60:e7", "network": {"id": "ce3bec03-01f9-4ac9-80e4-bfb944c0bb66", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-545997027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, 
"tenant_id": "87581f423dc64e4fb9fe1d51ebc68597", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6fa9c0fb-f2", "ovs_interfaceid": "6fa9c0fb-f285-4d44-8824-09041fd2f8f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 912.217528] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c70b9ac5-20e9-4628-8b0b-d566d6fa9e2f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.220625] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:96:60:e7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1768af3d-3317-4ef5-b484-0c2707d63de7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6fa9c0fb-f285-4d44-8824-09041fd2f8f6', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 912.228224] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 912.228534] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 912.232292] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-92015625-adb0-4839-bd96-0c5b6c658b8e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.249520] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 46526210-2783-408d-9ecb-773f33ff0c66] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 912.252195] env[69328]: DEBUG nova.compute.manager [req-447835c2-c119-4479-a6e8-8dc391948ed0 req-2845f695-fb80-492f-9954-c791f771a01c service nova] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Received event network-changed-6fa9c0fb-f285-4d44-8824-09041fd2f8f6 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 912.252379] env[69328]: DEBUG nova.compute.manager [req-447835c2-c119-4479-a6e8-8dc391948ed0 req-2845f695-fb80-492f-9954-c791f771a01c service nova] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Refreshing instance network info cache due to event network-changed-6fa9c0fb-f285-4d44-8824-09041fd2f8f6. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 912.252895] env[69328]: DEBUG oslo_concurrency.lockutils [req-447835c2-c119-4479-a6e8-8dc391948ed0 req-2845f695-fb80-492f-9954-c791f771a01c service nova] Acquiring lock "refresh_cache-3ba646e8-a5c8-4917-a1c4-32b37affb598" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.253086] env[69328]: DEBUG oslo_concurrency.lockutils [req-447835c2-c119-4479-a6e8-8dc391948ed0 req-2845f695-fb80-492f-9954-c791f771a01c service nova] Acquired lock "refresh_cache-3ba646e8-a5c8-4917-a1c4-32b37affb598" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 912.253261] env[69328]: DEBUG nova.network.neutron [req-447835c2-c119-4479-a6e8-8dc391948ed0 req-2845f695-fb80-492f-9954-c791f771a01c service nova] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Refreshing network info cache for port 6fa9c0fb-f285-4d44-8824-09041fd2f8f6 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 912.256519] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c878a8cd-c07a-4069-8d11-a1fc0bcb9f5e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.265154] env[69328]: DEBUG oslo_vmware.api [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273520, 'name': PowerOnVM_Task, 'duration_secs': 0.550161} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.265343] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 912.265343] env[69328]: value = "task-3273524" [ 912.265343] env[69328]: _type = "Task" [ 912.265343] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.266474] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 912.266719] env[69328]: INFO nova.compute.manager [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Took 8.77 seconds to spawn the instance on the hypervisor. [ 912.266914] env[69328]: DEBUG nova.compute.manager [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 912.267909] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd3c95e0-4648-48d0-80c7-b5d09ddd24c2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.288986] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273524, 'name': CreateVM_Task} progress is 15%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.376868] env[69328]: DEBUG oslo_vmware.api [None req-27776706-5241-4665-a52a-b96bfa6a670b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273522, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159616} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.376868] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-27776706-5241-4665-a52a-b96bfa6a670b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 912.377801] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-27776706-5241-4665-a52a-b96bfa6a670b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 912.378068] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-27776706-5241-4665-a52a-b96bfa6a670b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 912.378259] env[69328]: INFO nova.compute.manager [None req-27776706-5241-4665-a52a-b96bfa6a670b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Took 0.60 seconds to destroy the instance on the hypervisor. [ 912.378506] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-27776706-5241-4665-a52a-b96bfa6a670b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 912.378704] env[69328]: DEBUG nova.compute.manager [-] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 912.378799] env[69328]: DEBUG nova.network.neutron [-] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 912.408999] env[69328]: DEBUG nova.compute.manager [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 912.529530] env[69328]: DEBUG oslo_vmware.api [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273523, 'name': ReconfigVM_Task, 'duration_secs': 0.364197} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.529991] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Reconfigured VM instance instance-0000003e to detach disk 2000 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 912.532122] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b9b76e1-cfbe-4803-afeb-952b34d46e06 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.567761] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Reconfiguring VM instance instance-0000003e to attach disk [datastore2] fd72bae3-cb72-48d0-a0df-9ea3a770a86c/fd72bae3-cb72-48d0-a0df-9ea3a770a86c.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 912.568264] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9c278b4f-8624-4e43-927e-03c1f249d387 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.605015] env[69328]: DEBUG oslo_vmware.api [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 912.605015] env[69328]: value = "task-3273525" [ 912.605015] env[69328]: _type = "Task" [ 912.605015] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.616695] env[69328]: DEBUG oslo_vmware.api [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273525, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.677066] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "0a485411-3206-4674-90e4-58df4a8b755a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 912.677312] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "0a485411-3206-4674-90e4-58df4a8b755a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 912.684259] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.518s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 912.684788] env[69328]: DEBUG nova.compute.manager [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 912.690275] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.509s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 912.690275] env[69328]: INFO nova.compute.claims [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 912.756023] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 6102f8e6-f815-4f5f-921f-990be81fca0d] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 912.776946] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273524, 'name': CreateVM_Task, 'duration_secs': 0.350609} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.777137] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 912.777812] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.778015] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 912.778383] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 912.778650] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9b931ba-3d99-465f-ab31-9e3c9392b15e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.783612] env[69328]: DEBUG oslo_vmware.api [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 912.783612] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52af35f6-90a4-38d4-fce2-6fc47837ea81" [ 912.783612] env[69328]: _type = "Task" [ 912.783612] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.802998] env[69328]: DEBUG oslo_vmware.api [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52af35f6-90a4-38d4-fce2-6fc47837ea81, 'name': SearchDatastore_Task, 'duration_secs': 0.009919} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.804850] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 912.805395] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 912.805704] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.805872] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 912.806148] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 912.806320] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9b1946f0-bb83-4040-ae00-1cf7903632bb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.809181] env[69328]: INFO nova.compute.manager [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Took 51.55 seconds to build instance. [ 912.818188] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 912.818188] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 912.818188] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec5dee12-98cc-463a-8539-dbb0b11b8db8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.825955] env[69328]: DEBUG oslo_vmware.api [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 912.825955] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]529753fc-c67b-701c-d590-67c75dc3a119" [ 912.825955] env[69328]: _type = "Task" [ 912.825955] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.834311] env[69328]: DEBUG oslo_vmware.api [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]529753fc-c67b-701c-d590-67c75dc3a119, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.869022] env[69328]: DEBUG nova.network.neutron [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Successfully updated port: e4e1721b-99c8-403d-b056-e89c1a106a41 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 912.873775] env[69328]: DEBUG nova.network.neutron [req-0bd64db9-d811-4176-9cd0-292ef934a4d2 req-bff87463-463c-4ae9-81a8-7181c33efd03 service nova] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Updated VIF entry in instance network info cache for port 1018560a-13d7-4d01-8fc4-03d0b9beab90. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 912.874146] env[69328]: DEBUG nova.network.neutron [req-0bd64db9-d811-4176-9cd0-292ef934a4d2 req-bff87463-463c-4ae9-81a8-7181c33efd03 service nova] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Updating instance_info_cache with network_info: [{"id": "1018560a-13d7-4d01-8fc4-03d0b9beab90", "address": "fa:16:3e:33:ba:27", "network": {"id": "4031def8-0553-461f-9528-aa338b9d7b2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1834835647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f357b5a9494b4849a83aa934c5d4e26b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "357d2811-e990-4985-9f9e-b158d10d3699", "external-id": "nsx-vlan-transportzone-641", "segmentation_id": 641, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1018560a-13", "ovs_interfaceid": "1018560a-13d7-4d01-8fc4-03d0b9beab90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.941168] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 913.095051] env[69328]: DEBUG nova.network.neutron [req-447835c2-c119-4479-a6e8-8dc391948ed0 req-2845f695-fb80-492f-9954-c791f771a01c service nova] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Updated VIF entry in instance network info cache for port 6fa9c0fb-f285-4d44-8824-09041fd2f8f6. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 913.095051] env[69328]: DEBUG nova.network.neutron [req-447835c2-c119-4479-a6e8-8dc391948ed0 req-2845f695-fb80-492f-9954-c791f771a01c service nova] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Updating instance_info_cache with network_info: [{"id": "6fa9c0fb-f285-4d44-8824-09041fd2f8f6", "address": "fa:16:3e:96:60:e7", "network": {"id": "ce3bec03-01f9-4ac9-80e4-bfb944c0bb66", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-545997027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87581f423dc64e4fb9fe1d51ebc68597", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6fa9c0fb-f2", "ovs_interfaceid": "6fa9c0fb-f285-4d44-8824-09041fd2f8f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.116968] env[69328]: DEBUG oslo_vmware.api [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273525, 'name': ReconfigVM_Task, 'duration_secs': 0.343859} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.117301] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Reconfigured VM instance instance-0000003e to attach disk [datastore2] fd72bae3-cb72-48d0-a0df-9ea3a770a86c/fd72bae3-cb72-48d0-a0df-9ea3a770a86c.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 913.117589] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Updating instance 'fd72bae3-cb72-48d0-a0df-9ea3a770a86c' progress to 50 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 913.180516] env[69328]: DEBUG nova.compute.manager [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Starting instance... 
{{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 913.192905] env[69328]: DEBUG nova.compute.utils [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 913.197135] env[69328]: DEBUG nova.compute.manager [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 913.197310] env[69328]: DEBUG nova.network.neutron [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 913.235466] env[69328]: DEBUG nova.policy [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '047fba3350d249e6b48eda735fc10786', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'efd0e2d2f9ba4416bd8fd08dad912465', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 913.261377] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 7b348a95-3ab2-4112-87e3-b17504c0a302] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 913.311729] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bcba815e-47b0-436c-9b96-8a0ff823ef73 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Lock "a0952fdf-5570-4112-bc4d-e9f9cee1599c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.064s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 913.336994] env[69328]: DEBUG oslo_vmware.api [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]529753fc-c67b-701c-d590-67c75dc3a119, 'name': SearchDatastore_Task, 'duration_secs': 0.01437} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.337928] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7b1dc7f-27db-4dd0-862d-68e65fbd1a0c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.344182] env[69328]: DEBUG oslo_vmware.api [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 913.344182] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52db4d37-3d0f-bc18-6452-4d181a07ff2e" [ 913.344182] env[69328]: _type = "Task" [ 913.344182] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.352503] env[69328]: DEBUG oslo_vmware.api [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52db4d37-3d0f-bc18-6452-4d181a07ff2e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.376116] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "refresh_cache-20f750d7-1914-49bb-802f-464a30ffcf3a" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.376116] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquired lock "refresh_cache-20f750d7-1914-49bb-802f-464a30ffcf3a" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 913.376116] env[69328]: DEBUG nova.network.neutron [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 913.378711] env[69328]: DEBUG oslo_concurrency.lockutils [req-0bd64db9-d811-4176-9cd0-292ef934a4d2 req-bff87463-463c-4ae9-81a8-7181c33efd03 service nova] Releasing lock "refresh_cache-f1be93b2-08db-41fe-87c4-f4e5f964cfa4" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 913.448021] env[69328]: DEBUG nova.network.neutron [-] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.542844] env[69328]: DEBUG nova.network.neutron [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Successfully created port: 2a3862dd-bd04-40ed-9d66-1fa2418297ea {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 913.597541] env[69328]: DEBUG oslo_concurrency.lockutils [req-447835c2-c119-4479-a6e8-8dc391948ed0 
req-2845f695-fb80-492f-9954-c791f771a01c service nova] Releasing lock "refresh_cache-3ba646e8-a5c8-4917-a1c4-32b37affb598" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 913.627575] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d210f91-ff4c-4806-a7ba-b26c2dfac997 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.647355] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0fdc391-bff4-4ab2-8021-097d5197c469 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.667036] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Updating instance 'fd72bae3-cb72-48d0-a0df-9ea3a770a86c' progress to 67 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 913.698372] env[69328]: DEBUG nova.compute.manager [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 913.707605] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 913.765648] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: f428f9a9-d792-4c1c-b2d4-ea066cc09d67] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 913.857144] env[69328]: DEBUG oslo_vmware.api [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52db4d37-3d0f-bc18-6452-4d181a07ff2e, 'name': SearchDatastore_Task, 'duration_secs': 0.009743} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.858219] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 913.858491] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 3ba646e8-a5c8-4917-a1c4-32b37affb598/3ba646e8-a5c8-4917-a1c4-32b37affb598.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 913.863664] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3225b0c3-1631-4a6a-a13a-226b4cfa8cd3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.866335] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Acquiring lock "82e27131-b401-4885-83fb-825e5c8e2444" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 913.866525] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Lock "82e27131-b401-4885-83fb-825e5c8e2444" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 913.874315] env[69328]: DEBUG oslo_vmware.api [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 913.874315] env[69328]: value = "task-3273526" [ 913.874315] env[69328]: _type = "Task" [ 913.874315] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.887928] env[69328]: DEBUG oslo_vmware.api [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273526, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.891515] env[69328]: DEBUG nova.compute.manager [req-ad7342aa-8fc3-40aa-a864-d84957e04212 req-bd4f1d9c-60a0-42fa-a19f-e0ba11e4c5c4 service nova] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Received event network-vif-deleted-efb2e2c6-d681-4301-b80d-b7a78c91677c {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 913.915819] env[69328]: DEBUG nova.network.neutron [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 913.949613] env[69328]: INFO nova.compute.manager [-] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Took 1.57 seconds to deallocate network for instance. [ 914.080635] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b021dba2-44f2-41e4-b337-a2dc22df7c45 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.084441] env[69328]: DEBUG nova.network.neutron [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Updating instance_info_cache with network_info: [{"id": "e4e1721b-99c8-403d-b056-e89c1a106a41", "address": "fa:16:3e:2e:52:de", "network": {"id": "7f5dcab4-3cec-42a1-b589-88cb373af645", "bridge": "br-int", "label": "tempest-ServersTestJSON-1833802368-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d86de4d5055642aa86a29c6768e3db46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b94712a6-b777-47dd-bc06-f9acfce2d936", "external-id": "nsx-vlan-transportzone-494", "segmentation_id": 494, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4e1721b-99", "ovs_interfaceid": "e4e1721b-99c8-403d-b056-e89c1a106a41", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.091667] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88229e63-8709-40bc-b6b9-fa1f6e8656c8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.125541] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06b6c8bc-0dab-4a65-bd29-7fdec311fc58 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.134238] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6893fd41-1f7d-4ccb-8acc-ef4226bd0f92 {{(pid=69328) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.149029] env[69328]: DEBUG nova.compute.provider_tree [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 914.225352] env[69328]: DEBUG nova.network.neutron [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Port eebd5d04-278d-4e22-9e5d-df5ae37877cf binding to destination host cpu-1 is already ACTIVE {{(pid=69328) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 914.269315] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: bd2e4c6d-db41-4b3a-989d-02aaa59b4a6f] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 914.275167] env[69328]: DEBUG nova.compute.manager [req-491a4e1c-314a-4daa-a11b-9f965cf06e0f req-327ae0fe-b4c4-4c77-b65e-b29e5e6ace2c service nova] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Received event network-vif-plugged-e4e1721b-99c8-403d-b056-e89c1a106a41 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 914.275422] env[69328]: DEBUG oslo_concurrency.lockutils [req-491a4e1c-314a-4daa-a11b-9f965cf06e0f req-327ae0fe-b4c4-4c77-b65e-b29e5e6ace2c service nova] Acquiring lock "20f750d7-1914-49bb-802f-464a30ffcf3a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 914.275766] env[69328]: DEBUG oslo_concurrency.lockutils [req-491a4e1c-314a-4daa-a11b-9f965cf06e0f req-327ae0fe-b4c4-4c77-b65e-b29e5e6ace2c service nova] Lock "20f750d7-1914-49bb-802f-464a30ffcf3a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 914.275964] env[69328]: DEBUG oslo_concurrency.lockutils [req-491a4e1c-314a-4daa-a11b-9f965cf06e0f req-327ae0fe-b4c4-4c77-b65e-b29e5e6ace2c service nova] Lock "20f750d7-1914-49bb-802f-464a30ffcf3a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 914.276022] env[69328]: DEBUG nova.compute.manager [req-491a4e1c-314a-4daa-a11b-9f965cf06e0f req-327ae0fe-b4c4-4c77-b65e-b29e5e6ace2c service nova] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] No waiting events found dispatching network-vif-plugged-e4e1721b-99c8-403d-b056-e89c1a106a41 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 914.276197] env[69328]: WARNING nova.compute.manager 
[req-491a4e1c-314a-4daa-a11b-9f965cf06e0f req-327ae0fe-b4c4-4c77-b65e-b29e5e6ace2c service nova] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Received unexpected event network-vif-plugged-e4e1721b-99c8-403d-b056-e89c1a106a41 for instance with vm_state building and task_state spawning. [ 914.276299] env[69328]: DEBUG nova.compute.manager [req-491a4e1c-314a-4daa-a11b-9f965cf06e0f req-327ae0fe-b4c4-4c77-b65e-b29e5e6ace2c service nova] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Received event network-changed-e4e1721b-99c8-403d-b056-e89c1a106a41 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 914.276467] env[69328]: DEBUG nova.compute.manager [req-491a4e1c-314a-4daa-a11b-9f965cf06e0f req-327ae0fe-b4c4-4c77-b65e-b29e5e6ace2c service nova] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Refreshing instance network info cache due to event network-changed-e4e1721b-99c8-403d-b056-e89c1a106a41. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 914.276641] env[69328]: DEBUG oslo_concurrency.lockutils [req-491a4e1c-314a-4daa-a11b-9f965cf06e0f req-327ae0fe-b4c4-4c77-b65e-b29e5e6ace2c service nova] Acquiring lock "refresh_cache-20f750d7-1914-49bb-802f-464a30ffcf3a" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.368897] env[69328]: DEBUG nova.compute.manager [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 914.385743] env[69328]: DEBUG oslo_vmware.api [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273526, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.464305} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.386127] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 3ba646e8-a5c8-4917-a1c4-32b37affb598/3ba646e8-a5c8-4917-a1c4-32b37affb598.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 914.386458] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 914.386795] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9f55a9f7-4c81-4fa1-9d84-f3c029b777f2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.394916] env[69328]: DEBUG oslo_vmware.api [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 914.394916] env[69328]: value = "task-3273527" [ 914.394916] env[69328]: _type = "Task" [ 914.394916] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.404316] env[69328]: DEBUG oslo_vmware.api [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273527, 'name': ExtendVirtualDisk_Task} progress is 0%. 
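[editor's note] The ExtendVirtualDisk/CopyVirtualDisk entries around here all follow the same oslo.vmware wait_for_task shape: a vCenter task is invoked, then _poll_task reports "progress is N%" until the task "completed successfully". Below is a minimal, standard-library-only sketch of that poll loop; get_task_info, the state strings, and the dict keys are illustrative stand-ins, not the real oslo.vmware session API.

# Minimal sketch of the wait_for_task polling loop seen in these entries:
# submit a task, poll its state, log progress, and stop on success/error/timeout.
# `get_task_info` and the state/progress keys are hypothetical stand-ins.
import time
import logging

LOG = logging.getLogger(__name__)

def wait_for_task(get_task_info, task_id, interval=0.5, timeout=300):
    """Poll a task until it succeeds, fails, or times out."""
    start = time.monotonic()
    while True:
        info = get_task_info(task_id)   # e.g. {'state': 'running', 'progress': 40}
        state = info['state']
        if state == 'success':
            LOG.debug("Task %s completed successfully in %.3fs",
                      task_id, time.monotonic() - start)
            return info.get('result')
        if state == 'error':
            raise RuntimeError("Task %s failed: %s" % (task_id, info.get('error')))
        LOG.debug("Task %s progress is %s%%", task_id, info.get('progress', 0))
        if time.monotonic() - start > timeout:
            raise TimeoutError("Task %s did not finish within %ss" % (task_id, timeout))
        time.sleep(interval)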
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.459528] env[69328]: DEBUG oslo_concurrency.lockutils [None req-27776706-5241-4665-a52a-b96bfa6a670b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 914.587823] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Releasing lock "refresh_cache-20f750d7-1914-49bb-802f-464a30ffcf3a" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 914.588220] env[69328]: DEBUG nova.compute.manager [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Instance network_info: |[{"id": "e4e1721b-99c8-403d-b056-e89c1a106a41", "address": "fa:16:3e:2e:52:de", "network": {"id": "7f5dcab4-3cec-42a1-b589-88cb373af645", "bridge": "br-int", "label": "tempest-ServersTestJSON-1833802368-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d86de4d5055642aa86a29c6768e3db46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b94712a6-b777-47dd-bc06-f9acfce2d936", "external-id": "nsx-vlan-transportzone-494", "segmentation_id": 494, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4e1721b-99", "ovs_interfaceid": "e4e1721b-99c8-403d-b056-e89c1a106a41", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 914.588532] env[69328]: DEBUG oslo_concurrency.lockutils [req-491a4e1c-314a-4daa-a11b-9f965cf06e0f req-327ae0fe-b4c4-4c77-b65e-b29e5e6ace2c service nova] Acquired lock "refresh_cache-20f750d7-1914-49bb-802f-464a30ffcf3a" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 914.588712] env[69328]: DEBUG nova.network.neutron [req-491a4e1c-314a-4daa-a11b-9f965cf06e0f req-327ae0fe-b4c4-4c77-b65e-b29e5e6ace2c service nova] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Refreshing network info cache for port e4e1721b-99c8-403d-b056-e89c1a106a41 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 914.590457] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2e:52:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b94712a6-b777-47dd-bc06-f9acfce2d936', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'e4e1721b-99c8-403d-b056-e89c1a106a41', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 914.598919] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 914.600167] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 914.600516] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-766c1349-fcd7-4156-b51d-e97023bd23f4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.622625] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 914.622625] env[69328]: value = "task-3273528" [ 914.622625] env[69328]: _type = "Task" [ 914.622625] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.632950] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273528, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.671642] env[69328]: ERROR nova.scheduler.client.report [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [req-7ae5cccb-f605-49ce-b8db-cdea8648371b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-7ae5cccb-f605-49ce-b8db-cdea8648371b"}]} [ 914.690181] env[69328]: DEBUG nova.scheduler.client.report [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Refreshing inventories for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 914.704734] env[69328]: DEBUG nova.scheduler.client.report [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Updating ProviderTree inventory for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 914.704734] env[69328]: DEBUG nova.compute.provider_tree [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 914.713092] env[69328]: DEBUG nova.compute.manager [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Start spawning the instance on the hypervisor. 
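[editor's note] The 409 "placement.concurrent_update" above is the usual signature of a resource-provider generation conflict: the inventory PUT carried a stale generation, so the report client re-reads the provider's inventories (and, in the following entries, its aggregates and traits) and retries with the fresh generation, which eventually succeeds later in this log. A simplified sketch of that retry loop, assuming a hypothetical `placement` client object rather than Nova's actual SchedulerReportClient:

# Simplified sketch of the generation-conflict retry visible above: PUT the
# inventory with the cached provider generation and, on a conflict, refresh
# the provider and try again with the newer generation.
# `placement.get_inventories` / `placement.put_inventories` are assumptions.

class ConcurrentUpdate(Exception):
    """Raised when placement reports a provider generation conflict (HTTP 409)."""

def set_inventory_with_retry(placement, provider_uuid, inventory, max_attempts=4):
    for _ in range(max_attempts):
        current = placement.get_inventories(provider_uuid)
        generation = current['resource_provider_generation']
        try:
            return placement.put_inventories(provider_uuid, generation, inventory)
        except ConcurrentUpdate:
            # Another writer (e.g. a concurrent resource-tracker run) bumped the
            # generation; loop to refresh and retry with the newer value.
            continue
    raise RuntimeError("gave up updating inventory for %s after %d attempts"
                       % (provider_uuid, max_attempts))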
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 914.718766] env[69328]: DEBUG nova.scheduler.client.report [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Refreshing aggregate associations for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e, aggregates: None {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 914.736346] env[69328]: DEBUG nova.scheduler.client.report [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Refreshing trait associations for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 914.741333] env[69328]: DEBUG nova.virt.hardware [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 914.741599] env[69328]: DEBUG nova.virt.hardware [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 914.741769] env[69328]: DEBUG nova.virt.hardware [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 914.741956] env[69328]: DEBUG nova.virt.hardware [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 914.742147] env[69328]: DEBUG nova.virt.hardware [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 914.742316] env[69328]: DEBUG nova.virt.hardware [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 
tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 914.742547] env[69328]: DEBUG nova.virt.hardware [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 914.742722] env[69328]: DEBUG nova.virt.hardware [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 914.742903] env[69328]: DEBUG nova.virt.hardware [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 914.743077] env[69328]: DEBUG nova.virt.hardware [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 914.743268] env[69328]: DEBUG nova.virt.hardware [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 914.744158] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c8a2b81-f468-4a71-8261-89d04d65d73b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.754491] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09a81e3c-de0f-434a-8c81-432082029af0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.775410] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: edb1a21a-6907-4198-a977-c1213e8fecc0] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 914.889303] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 914.906935] env[69328]: DEBUG oslo_vmware.api [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273527, 'name': 
ExtendVirtualDisk_Task, 'duration_secs': 0.065825} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.907244] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 914.908267] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8e78188-318d-4802-b9e9-61472b38955b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.933544] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Reconfiguring VM instance instance-00000046 to attach disk [datastore2] 3ba646e8-a5c8-4917-a1c4-32b37affb598/3ba646e8-a5c8-4917-a1c4-32b37affb598.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 914.934123] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-08eb8a10-611d-4f10-9785-53dd5617ae2f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.954103] env[69328]: DEBUG oslo_vmware.api [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 914.954103] env[69328]: value = "task-3273529" [ 914.954103] env[69328]: _type = "Task" [ 914.954103] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.964852] env[69328]: DEBUG oslo_vmware.api [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273529, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.060276] env[69328]: DEBUG nova.network.neutron [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Successfully updated port: 2a3862dd-bd04-40ed-9d66-1fa2418297ea {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 915.135453] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273528, 'name': CreateVM_Task, 'duration_secs': 0.361718} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.137659] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 915.138451] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.138499] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 915.138800] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 915.139562] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf8f5b58-d75f-47e3-9315-a2f210380d8a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.147106] env[69328]: DEBUG oslo_vmware.api [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 915.147106] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d5f39e-4312-2074-1722-f58e96c383f0" [ 915.147106] env[69328]: _type = "Task" [ 915.147106] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.154951] env[69328]: DEBUG oslo_vmware.api [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d5f39e-4312-2074-1722-f58e96c383f0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.156474] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eefaeb73-bd35-47e1-913a-1374ea0fa49a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.162799] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41419c71-e7e4-440b-88b8-6a60e1b4beb9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.193608] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ab2c90e-df36-4702-9158-41f2b2e9125c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.203204] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-095d283b-f070-4711-97c9-a45dd2e89617 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.217408] env[69328]: DEBUG nova.compute.provider_tree [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 915.250543] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "fd72bae3-cb72-48d0-a0df-9ea3a770a86c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 915.250782] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "fd72bae3-cb72-48d0-a0df-9ea3a770a86c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 915.250959] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "fd72bae3-cb72-48d0-a0df-9ea3a770a86c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 915.278633] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 26feb2d1-ff64-4a13-af83-b6d5fe4348e1] Instance has had 0 of 5 cleanup attempts 
{{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 915.361131] env[69328]: DEBUG nova.network.neutron [req-491a4e1c-314a-4daa-a11b-9f965cf06e0f req-327ae0fe-b4c4-4c77-b65e-b29e5e6ace2c service nova] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Updated VIF entry in instance network info cache for port e4e1721b-99c8-403d-b056-e89c1a106a41. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 915.361131] env[69328]: DEBUG nova.network.neutron [req-491a4e1c-314a-4daa-a11b-9f965cf06e0f req-327ae0fe-b4c4-4c77-b65e-b29e5e6ace2c service nova] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Updating instance_info_cache with network_info: [{"id": "e4e1721b-99c8-403d-b056-e89c1a106a41", "address": "fa:16:3e:2e:52:de", "network": {"id": "7f5dcab4-3cec-42a1-b589-88cb373af645", "bridge": "br-int", "label": "tempest-ServersTestJSON-1833802368-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d86de4d5055642aa86a29c6768e3db46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b94712a6-b777-47dd-bc06-f9acfce2d936", "external-id": "nsx-vlan-transportzone-494", "segmentation_id": 494, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4e1721b-99", "ovs_interfaceid": "e4e1721b-99c8-403d-b056-e89c1a106a41", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.464143] env[69328]: DEBUG oslo_vmware.api [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273529, 'name': ReconfigVM_Task, 'duration_secs': 0.469309} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.464493] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Reconfigured VM instance instance-00000046 to attach disk [datastore2] 3ba646e8-a5c8-4917-a1c4-32b37affb598/3ba646e8-a5c8-4917-a1c4-32b37affb598.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 915.465139] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-42333d31-fa8c-4149-97a2-4cff80e541fa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.471831] env[69328]: DEBUG oslo_vmware.api [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 915.471831] env[69328]: value = "task-3273530" [ 915.471831] env[69328]: _type = "Task" [ 915.471831] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.479604] env[69328]: DEBUG oslo_vmware.api [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273530, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.565403] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquiring lock "refresh_cache-c751ef77-c3be-46cd-b7eb-fe139bf0998b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.565578] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquired lock "refresh_cache-c751ef77-c3be-46cd-b7eb-fe139bf0998b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 915.565739] env[69328]: DEBUG nova.network.neutron [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 915.657891] env[69328]: DEBUG oslo_vmware.api [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d5f39e-4312-2074-1722-f58e96c383f0, 'name': SearchDatastore_Task, 'duration_secs': 0.024567} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.657891] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 915.658105] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 915.658152] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.658304] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 915.658516] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 915.658829] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eec4169b-c552-4d75-840f-7235890a64ef {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.667845] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 915.668048] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Folder [datastore1] devstack-image-cache_base created. 
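[editor's note] The entries here show the vmwareapi image-cache flow: a SearchDatastore_Task checks whether the cached base image vmdk already exists under devstack-image-cache_base, the folder is created if missing, and the disk is later copied into the per-instance directory (the CopyVirtualDisk_Task further down) before the root disk is extended. A rough local-filesystem analogue of that "fetch if missing, then copy per instance" logic; the paths and the `download` callable are made up for illustration and the real driver performs these steps against a datastore.

# Rough local-filesystem analogue of the image-cache flow in these entries:
# check the shared cache, download the image only if it is missing, then copy
# it into the instance's own directory. All names here are illustrative.
import shutil
from pathlib import Path

def ensure_cached_image(cache_dir: Path, image_id: str, download) -> Path:
    cached = cache_dir / image_id / f"{image_id}.vmdk"
    if not cached.exists():                                # SearchDatastore_Task equivalent
        cached.parent.mkdir(parents=True, exist_ok=True)   # MakeDirectory equivalent
        download(image_id, cached)                         # fetch from the image service
    return cached

def copy_for_instance(cached: Path, instance_dir: Path, instance_uuid: str) -> Path:
    instance_dir.mkdir(parents=True, exist_ok=True)
    target = instance_dir / f"{instance_uuid}.vmdk"
    shutil.copyfile(cached, target)                        # CopyVirtualDisk_Task equivalent
    return target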
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 915.668764] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbc66364-7fcf-4754-b4da-0a60d27f9296 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.674186] env[69328]: DEBUG oslo_vmware.api [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 915.674186] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]522c0653-425e-7943-103e-4ccaac3281bc" [ 915.674186] env[69328]: _type = "Task" [ 915.674186] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.681898] env[69328]: DEBUG oslo_vmware.api [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]522c0653-425e-7943-103e-4ccaac3281bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.753613] env[69328]: DEBUG nova.scheduler.client.report [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Updated inventory for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with generation 106 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 915.753883] env[69328]: DEBUG nova.compute.provider_tree [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Updating resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e generation from 106 to 107 during operation: update_inventory {{(pid=69328) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 915.754074] env[69328]: DEBUG nova.compute.provider_tree [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 915.784154] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: a798c3f2-ccde-488e-8a14-21f4a04f8e12] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes 
/opt/stack/nova/nova/compute/manager.py:11847}} [ 915.864766] env[69328]: DEBUG oslo_concurrency.lockutils [req-491a4e1c-314a-4daa-a11b-9f965cf06e0f req-327ae0fe-b4c4-4c77-b65e-b29e5e6ace2c service nova] Releasing lock "refresh_cache-20f750d7-1914-49bb-802f-464a30ffcf3a" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 915.983021] env[69328]: DEBUG oslo_vmware.api [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273530, 'name': Rename_Task, 'duration_secs': 0.254813} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.983133] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 915.983307] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5ab75844-8010-4156-b644-6cda3b0af0ac {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.990105] env[69328]: DEBUG oslo_vmware.api [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 915.990105] env[69328]: value = "task-3273531" [ 915.990105] env[69328]: _type = "Task" [ 915.990105] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.997808] env[69328]: DEBUG oslo_vmware.api [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273531, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.108737] env[69328]: DEBUG nova.network.neutron [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 916.184588] env[69328]: DEBUG oslo_vmware.api [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]522c0653-425e-7943-103e-4ccaac3281bc, 'name': SearchDatastore_Task, 'duration_secs': 0.008417} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.185489] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-561ef7e2-0cb5-465d-9181-df4ef8756d59 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.190730] env[69328]: DEBUG oslo_vmware.api [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 916.190730] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5280e6b9-f9ea-be7f-6a3e-0fd4a6fe2e36" [ 916.190730] env[69328]: _type = "Task" [ 916.190730] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.198753] env[69328]: DEBUG oslo_vmware.api [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5280e6b9-f9ea-be7f-6a3e-0fd4a6fe2e36, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.232538] env[69328]: DEBUG nova.network.neutron [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Updating instance_info_cache with network_info: [{"id": "2a3862dd-bd04-40ed-9d66-1fa2418297ea", "address": "fa:16:3e:ad:2d:2a", "network": {"id": "bd41ac10-1850-45a2-8e46-6ebdca3d8e13", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-766393176-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efd0e2d2f9ba4416bd8fd08dad912465", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a3862dd-bd", "ovs_interfaceid": "2a3862dd-bd04-40ed-9d66-1fa2418297ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.260596] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.573s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 916.260596] env[69328]: DEBUG nova.compute.manager [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 
tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 916.265651] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e9757ca2-942a-4ea5-b6e2-d822199969ea tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.765s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 916.265842] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e9757ca2-942a-4ea5-b6e2-d822199969ea tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 916.267604] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.472s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 916.269083] env[69328]: INFO nova.compute.claims [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 916.286935] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: d4c08ccc-f1a0-4a2d-972c-b2d4a8961fa2] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 916.293014] env[69328]: INFO nova.scheduler.client.report [None req-e9757ca2-942a-4ea5-b6e2-d822199969ea tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Deleted allocations for instance 7232ad5c-9f4e-425e-824a-4c3750f665eb [ 916.317298] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "refresh_cache-fd72bae3-cb72-48d0-a0df-9ea3a770a86c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.317488] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquired lock "refresh_cache-fd72bae3-cb72-48d0-a0df-9ea3a770a86c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 916.317664] env[69328]: DEBUG nova.network.neutron [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Building network info cache for instance 
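[editor's note] The 'acquired ... waited 34.765s' / '"released" ... held 0.000s' pairs above come from the lock wrapper timing how long each caller waited on the shared "compute_resources" lock and how long it then held it. A standard-library stand-in that reproduces that logging shape only; it is not the oslo_concurrency.lockutils implementation.

# Stdlib-only stand-in for the lock logging seen above: time how long the
# caller waited to acquire a named lock and how long it held it.
import threading
import time
import logging
from contextlib import contextmanager

LOG = logging.getLogger(__name__)
_LOCKS = {}
_REGISTRY_LOCK = threading.Lock()

@contextmanager
def timed_lock(name, owner):
    with _REGISTRY_LOCK:
        lock = _LOCKS.setdefault(name, threading.Lock())
    LOG.debug('Acquiring lock "%s" by "%s"', name, owner)
    t0 = time.monotonic()
    lock.acquire()
    t1 = time.monotonic()
    LOG.debug('Lock "%s" acquired by "%s" :: waited %.3fs', name, owner, t1 - t0)
    try:
        yield
    finally:
        lock.release()
        LOG.debug('Lock "%s" "released" by "%s" :: held %.3fs',
                  name, owner, time.monotonic() - t1)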
{{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 916.347176] env[69328]: DEBUG nova.compute.manager [req-f552348b-7cbb-4f55-b9a5-8174f360860d req-7ec1bc44-d739-494d-b455-488d3d6b2b21 service nova] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Received event network-vif-plugged-2a3862dd-bd04-40ed-9d66-1fa2418297ea {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 916.347408] env[69328]: DEBUG oslo_concurrency.lockutils [req-f552348b-7cbb-4f55-b9a5-8174f360860d req-7ec1bc44-d739-494d-b455-488d3d6b2b21 service nova] Acquiring lock "c751ef77-c3be-46cd-b7eb-fe139bf0998b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 916.347656] env[69328]: DEBUG oslo_concurrency.lockutils [req-f552348b-7cbb-4f55-b9a5-8174f360860d req-7ec1bc44-d739-494d-b455-488d3d6b2b21 service nova] Lock "c751ef77-c3be-46cd-b7eb-fe139bf0998b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 916.347823] env[69328]: DEBUG oslo_concurrency.lockutils [req-f552348b-7cbb-4f55-b9a5-8174f360860d req-7ec1bc44-d739-494d-b455-488d3d6b2b21 service nova] Lock "c751ef77-c3be-46cd-b7eb-fe139bf0998b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 916.348030] env[69328]: DEBUG nova.compute.manager [req-f552348b-7cbb-4f55-b9a5-8174f360860d req-7ec1bc44-d739-494d-b455-488d3d6b2b21 service nova] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] No waiting events found dispatching network-vif-plugged-2a3862dd-bd04-40ed-9d66-1fa2418297ea {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 916.348244] env[69328]: WARNING nova.compute.manager [req-f552348b-7cbb-4f55-b9a5-8174f360860d req-7ec1bc44-d739-494d-b455-488d3d6b2b21 service nova] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Received unexpected event network-vif-plugged-2a3862dd-bd04-40ed-9d66-1fa2418297ea for instance with vm_state building and task_state spawning. [ 916.348469] env[69328]: DEBUG nova.compute.manager [req-f552348b-7cbb-4f55-b9a5-8174f360860d req-7ec1bc44-d739-494d-b455-488d3d6b2b21 service nova] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Received event network-changed-2a3862dd-bd04-40ed-9d66-1fa2418297ea {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 916.348558] env[69328]: DEBUG nova.compute.manager [req-f552348b-7cbb-4f55-b9a5-8174f360860d req-7ec1bc44-d739-494d-b455-488d3d6b2b21 service nova] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Refreshing instance network info cache due to event network-changed-2a3862dd-bd04-40ed-9d66-1fa2418297ea. 
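[editor's note] The event entries above follow a consistent pop-or-warn shape: a network-vif-plugged-<port> notification arrives from Neutron, the per-instance "<uuid>-events" lock is taken, and because no waiter registered for that event while the instance is still building, the "Received unexpected event" warning is logged and processing continues. A compact sketch of that dispatch under those assumptions; class and method names are illustrative, not Nova's actual API.

# Compact sketch of the pop-or-warn external event dispatch seen above: signal
# a registered waiter for "network-vif-plugged-<port>" if one exists, otherwise
# log the event as unexpected.
import threading
import logging

LOG = logging.getLogger(__name__)

class InstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}          # (instance_uuid, event_name) -> threading.Event

    def prepare_for_event(self, instance_uuid, event_name):
        waiter = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = waiter
        return waiter               # caller blocks on waiter.wait(timeout=...)

    def dispatch(self, instance_uuid, event_name, vm_state):
        with self._lock:            # the "<uuid>-events" lock in the log
            waiter = self._waiters.pop((instance_uuid, event_name), None)
        if waiter is None:
            LOG.warning("Received unexpected event %s for instance %s "
                        "with vm_state %s.", event_name, instance_uuid, vm_state)
            return
        waiter.set()                # unblock whoever was waiting for the plug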
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 916.348726] env[69328]: DEBUG oslo_concurrency.lockutils [req-f552348b-7cbb-4f55-b9a5-8174f360860d req-7ec1bc44-d739-494d-b455-488d3d6b2b21 service nova] Acquiring lock "refresh_cache-c751ef77-c3be-46cd-b7eb-fe139bf0998b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.500061] env[69328]: DEBUG oslo_vmware.api [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273531, 'name': PowerOnVM_Task, 'duration_secs': 0.443303} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.500270] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 916.500469] env[69328]: INFO nova.compute.manager [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Took 6.86 seconds to spawn the instance on the hypervisor. [ 916.500646] env[69328]: DEBUG nova.compute.manager [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 916.501396] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e75b43a1-b1c2-4d9f-8ecc-d10903da315f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.702103] env[69328]: DEBUG oslo_vmware.api [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5280e6b9-f9ea-be7f-6a3e-0fd4a6fe2e36, 'name': SearchDatastore_Task, 'duration_secs': 0.009532} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.702369] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 916.702770] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 20f750d7-1914-49bb-802f-464a30ffcf3a/20f750d7-1914-49bb-802f-464a30ffcf3a.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 916.702961] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a6e14533-d95e-4076-b818-6390dc186e1b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.710164] env[69328]: DEBUG oslo_vmware.api [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 916.710164] env[69328]: value = "task-3273532" [ 916.710164] env[69328]: _type = "Task" [ 916.710164] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.719349] env[69328]: DEBUG oslo_vmware.api [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273532, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.736779] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Releasing lock "refresh_cache-c751ef77-c3be-46cd-b7eb-fe139bf0998b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 916.736779] env[69328]: DEBUG nova.compute.manager [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Instance network_info: |[{"id": "2a3862dd-bd04-40ed-9d66-1fa2418297ea", "address": "fa:16:3e:ad:2d:2a", "network": {"id": "bd41ac10-1850-45a2-8e46-6ebdca3d8e13", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-766393176-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efd0e2d2f9ba4416bd8fd08dad912465", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a3862dd-bd", "ovs_interfaceid": "2a3862dd-bd04-40ed-9d66-1fa2418297ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 916.736779] env[69328]: DEBUG oslo_concurrency.lockutils [req-f552348b-7cbb-4f55-b9a5-8174f360860d req-7ec1bc44-d739-494d-b455-488d3d6b2b21 service nova] Acquired lock "refresh_cache-c751ef77-c3be-46cd-b7eb-fe139bf0998b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 916.736779] env[69328]: DEBUG nova.network.neutron [req-f552348b-7cbb-4f55-b9a5-8174f360860d req-7ec1bc44-d739-494d-b455-488d3d6b2b21 service nova] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Refreshing network info cache for port 2a3862dd-bd04-40ed-9d66-1fa2418297ea {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 916.737171] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ad:2d:2a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2e614f8e-6b11-4b6b-a421-904bca6acd91', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2a3862dd-bd04-40ed-9d66-1fa2418297ea', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 916.744784] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 
tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Creating folder: Project (efd0e2d2f9ba4416bd8fd08dad912465). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 916.745858] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8f431bc4-f947-4990-bfe0-15ce1cba6b08 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.757702] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Created folder: Project (efd0e2d2f9ba4416bd8fd08dad912465) in parent group-v653649. [ 916.757876] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Creating folder: Instances. Parent ref: group-v653856. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 916.758138] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a9c59fc1-e7a1-4e44-be2c-0425cbee10f2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.766970] env[69328]: DEBUG nova.compute.utils [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 916.769559] env[69328]: DEBUG nova.compute.manager [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 916.769765] env[69328]: DEBUG nova.network.neutron [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 916.771697] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Created folder: Instances in parent group-v653856. [ 916.771976] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 916.772478] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 916.775358] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c49ee20b-3fc0-480f-8fc7-22336851f8ec {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.792469] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: ed10d511-dbed-4884-8ac6-f737173f62c5] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 916.801205] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 916.801205] env[69328]: value = "task-3273535" [ 916.801205] env[69328]: _type = "Task" [ 916.801205] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.801775] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e9757ca2-942a-4ea5-b6e2-d822199969ea tempest-ServerAddressesNegativeTestJSON-1881773867 tempest-ServerAddressesNegativeTestJSON-1881773867-project-member] Lock "7232ad5c-9f4e-425e-824a-4c3750f665eb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.216s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 916.812916] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273535, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.837459] env[69328]: DEBUG nova.policy [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '047fba3350d249e6b48eda735fc10786', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'efd0e2d2f9ba4416bd8fd08dad912465', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 917.023407] env[69328]: INFO nova.compute.manager [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Took 48.83 seconds to build instance. 
[ 917.059554] env[69328]: DEBUG nova.network.neutron [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Updating instance_info_cache with network_info: [{"id": "eebd5d04-278d-4e22-9e5d-df5ae37877cf", "address": "fa:16:3e:d8:d9:39", "network": {"id": "e2ab6957-2c9d-4b95-91bd-5a62d9a284ba", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-967470666-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75d5853e3c724d02bacfa75173e38ab3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeebd5d04-27", "ovs_interfaceid": "eebd5d04-278d-4e22-9e5d-df5ae37877cf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.112070] env[69328]: DEBUG nova.network.neutron [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Successfully created port: 7843ca64-fb43-4866-9bd7-f10b7c7e085e {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 917.220545] env[69328]: DEBUG oslo_vmware.api [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273532, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.492761} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.220920] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 20f750d7-1914-49bb-802f-464a30ffcf3a/20f750d7-1914-49bb-802f-464a30ffcf3a.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 917.221297] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 917.221441] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e2ef68cb-f2d4-4c25-ae21-fade7ce7e28a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.227984] env[69328]: DEBUG oslo_vmware.api [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 917.227984] env[69328]: value = "task-3273536" [ 917.227984] env[69328]: _type = "Task" [ 917.227984] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.237710] env[69328]: DEBUG oslo_vmware.api [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273536, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.274011] env[69328]: DEBUG nova.compute.manager [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 917.299050] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: d97dc6d5-e55f-4b9e-91e6-cfdea82f5236] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 917.313099] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273535, 'name': CreateVM_Task, 'duration_secs': 0.439713} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.315410] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 917.316151] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.316250] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 917.316536] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 917.317078] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15465d4b-fd67-4e84-89b9-9838afe38d48 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.324878] env[69328]: DEBUG oslo_vmware.api [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 917.324878] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52847489-bc75-9a64-4278-12c0595c0e17" [ 917.324878] env[69328]: _type = "Task" [ 917.324878] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.334480] env[69328]: DEBUG oslo_vmware.api [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52847489-bc75-9a64-4278-12c0595c0e17, 'name': SearchDatastore_Task, 'duration_secs': 0.009967} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.341022] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 917.341022] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 917.341022] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.341022] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 917.341022] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 917.341022] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-11bc104e-88e9-4210-b9a6-cae482dd7d68 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.348140] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 917.348322] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 917.349053] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7912507c-ed7d-4fcc-b9dc-24fc84dfbf40 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.354226] env[69328]: DEBUG oslo_vmware.api [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 917.354226] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b6f0d4-62d2-37bb-942d-747486e709f6" [ 917.354226] env[69328]: _type = "Task" [ 917.354226] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.363054] env[69328]: DEBUG oslo_vmware.api [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b6f0d4-62d2-37bb-942d-747486e709f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.529263] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5efbdf17-ee43-4f60-9aaf-faf9bd099917 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "3ba646e8-a5c8-4917-a1c4-32b37affb598" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.341s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 917.563927] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Releasing lock "refresh_cache-fd72bae3-cb72-48d0-a0df-9ea3a770a86c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 917.590813] env[69328]: DEBUG nova.network.neutron [req-f552348b-7cbb-4f55-b9a5-8174f360860d req-7ec1bc44-d739-494d-b455-488d3d6b2b21 service nova] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Updated VIF entry in instance network info cache for port 2a3862dd-bd04-40ed-9d66-1fa2418297ea. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 917.591266] env[69328]: DEBUG nova.network.neutron [req-f552348b-7cbb-4f55-b9a5-8174f360860d req-7ec1bc44-d739-494d-b455-488d3d6b2b21 service nova] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Updating instance_info_cache with network_info: [{"id": "2a3862dd-bd04-40ed-9d66-1fa2418297ea", "address": "fa:16:3e:ad:2d:2a", "network": {"id": "bd41ac10-1850-45a2-8e46-6ebdca3d8e13", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-766393176-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efd0e2d2f9ba4416bd8fd08dad912465", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a3862dd-bd", "ovs_interfaceid": "2a3862dd-bd04-40ed-9d66-1fa2418297ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.682149] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5350cb4-b764-4d91-af31-84a8dc4443b1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.690472] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ad49bd0-e3c9-4165-8cf1-40592438f7cd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.727435] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa34168e-f580-459f-a174-60cc570a88b0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.742122] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1ff1570-2eed-449b-a79e-f56b54c31495 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.748715] env[69328]: DEBUG oslo_vmware.api [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273536, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069243} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.748954] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 917.750222] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf309f62-fd68-4494-ab9a-a872d59b1ccf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.761788] env[69328]: DEBUG nova.compute.provider_tree [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 917.802145] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] 20f750d7-1914-49bb-802f-464a30ffcf3a/20f750d7-1914-49bb-802f-464a30ffcf3a.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 917.804135] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: caba3b5c-db15-4de6-8d3d-41f6751f1b83] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 917.805928] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-beea274d-86e2-4a99-bd96-1aab2998899f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.832703] env[69328]: DEBUG oslo_vmware.api [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 917.832703] env[69328]: value = "task-3273537" [ 917.832703] env[69328]: _type = "Task" [ 917.832703] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.843218] env[69328]: DEBUG oslo_vmware.api [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273537, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.864959] env[69328]: DEBUG oslo_vmware.api [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b6f0d4-62d2-37bb-942d-747486e709f6, 'name': SearchDatastore_Task, 'duration_secs': 0.008801} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.865756] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e414233b-7bf8-4bf3-b88b-6432d1f12e0f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.871581] env[69328]: DEBUG oslo_vmware.api [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 917.871581] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e003d7-80a5-4236-0c0d-6a4329c73259" [ 917.871581] env[69328]: _type = "Task" [ 917.871581] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.879399] env[69328]: DEBUG oslo_vmware.api [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e003d7-80a5-4236-0c0d-6a4329c73259, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.086092] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4ce6efa-0ea2-4825-b415-76fc52cb023d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.106074] env[69328]: DEBUG oslo_concurrency.lockutils [req-f552348b-7cbb-4f55-b9a5-8174f360860d req-7ec1bc44-d739-494d-b455-488d3d6b2b21 service nova] Releasing lock "refresh_cache-c751ef77-c3be-46cd-b7eb-fe139bf0998b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 918.107448] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8ee2823-1470-4799-a995-70a86510c5e8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.114951] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Updating instance 'fd72bae3-cb72-48d0-a0df-9ea3a770a86c' progress to 83 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 918.304384] env[69328]: DEBUG nova.scheduler.client.report [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 918.308306] env[69328]: DEBUG nova.compute.manager [None req-c0ec662a-dab8-473b-a866-90db07c5f25c 
tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 918.326288] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 50b84adc-5ff3-4a1e-a09f-5c96daef9b87] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 918.342202] env[69328]: DEBUG nova.virt.hardware [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 918.342460] env[69328]: DEBUG nova.virt.hardware [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 918.342681] env[69328]: DEBUG nova.virt.hardware [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 918.342919] env[69328]: DEBUG nova.virt.hardware [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 918.344953] env[69328]: DEBUG nova.virt.hardware [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 918.349021] env[69328]: DEBUG nova.virt.hardware [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 918.349021] env[69328]: DEBUG nova.virt.hardware [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 918.349021] env[69328]: DEBUG nova.virt.hardware [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 918.349021] env[69328]: DEBUG nova.virt.hardware [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 918.349021] env[69328]: DEBUG nova.virt.hardware [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 918.349021] env[69328]: DEBUG nova.virt.hardware [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 918.349021] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4df546c3-12ea-4f11-8a08-fc6fa7cab9ce {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.357565] env[69328]: DEBUG oslo_vmware.api [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273537, 'name': ReconfigVM_Task, 'duration_secs': 0.416964} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.359793] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Reconfigured VM instance instance-00000047 to attach disk [datastore1] 20f750d7-1914-49bb-802f-464a30ffcf3a/20f750d7-1914-49bb-802f-464a30ffcf3a.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 918.361115] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-adf416d2-b0ce-4d4e-b72d-ee2389c10c8e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.363738] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f490d151-8b01-43c6-97ef-5dc7a47d3675 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.383988] env[69328]: DEBUG oslo_vmware.api [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 918.383988] env[69328]: value = "task-3273538" [ 918.383988] env[69328]: _type = "Task" [ 918.383988] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.401059] env[69328]: DEBUG nova.compute.manager [req-62207c8d-a9eb-4024-8b6d-b23f640d0b72 req-613426da-848c-45d8-ad9e-0d5bd8063cd8 service nova] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Received event network-changed-6fa9c0fb-f285-4d44-8824-09041fd2f8f6 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 918.401059] env[69328]: DEBUG nova.compute.manager [req-62207c8d-a9eb-4024-8b6d-b23f640d0b72 req-613426da-848c-45d8-ad9e-0d5bd8063cd8 service nova] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Refreshing instance network info cache due to event network-changed-6fa9c0fb-f285-4d44-8824-09041fd2f8f6. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 918.401059] env[69328]: DEBUG oslo_concurrency.lockutils [req-62207c8d-a9eb-4024-8b6d-b23f640d0b72 req-613426da-848c-45d8-ad9e-0d5bd8063cd8 service nova] Acquiring lock "refresh_cache-3ba646e8-a5c8-4917-a1c4-32b37affb598" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.401059] env[69328]: DEBUG oslo_concurrency.lockutils [req-62207c8d-a9eb-4024-8b6d-b23f640d0b72 req-613426da-848c-45d8-ad9e-0d5bd8063cd8 service nova] Acquired lock "refresh_cache-3ba646e8-a5c8-4917-a1c4-32b37affb598" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 918.401059] env[69328]: DEBUG nova.network.neutron [req-62207c8d-a9eb-4024-8b6d-b23f640d0b72 req-613426da-848c-45d8-ad9e-0d5bd8063cd8 service nova] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Refreshing network info cache for port 6fa9c0fb-f285-4d44-8824-09041fd2f8f6 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 918.401059] env[69328]: DEBUG oslo_vmware.api [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e003d7-80a5-4236-0c0d-6a4329c73259, 'name': SearchDatastore_Task, 'duration_secs': 0.014452} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.401579] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 918.401707] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] c751ef77-c3be-46cd-b7eb-fe139bf0998b/c751ef77-c3be-46cd-b7eb-fe139bf0998b.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 918.402211] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5f22c5ca-65df-47b4-8b97-7cb5a391b163 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.408338] env[69328]: DEBUG oslo_vmware.api [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273538, 'name': Rename_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.413135] env[69328]: DEBUG oslo_vmware.api [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 918.413135] env[69328]: value = "task-3273539" [ 918.413135] env[69328]: _type = "Task" [ 918.413135] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.424330] env[69328]: DEBUG oslo_vmware.api [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273539, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.622222] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 918.623491] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3ce98ccd-11ec-4227-8901-691e8a85c82d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.635374] env[69328]: DEBUG oslo_vmware.api [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 918.635374] env[69328]: value = "task-3273540" [ 918.635374] env[69328]: _type = "Task" [ 918.635374] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.643709] env[69328]: DEBUG oslo_vmware.api [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273540, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.660535] env[69328]: DEBUG nova.network.neutron [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Successfully updated port: 7843ca64-fb43-4866-9bd7-f10b7c7e085e {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 918.812196] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.544s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 918.812837] env[69328]: DEBUG nova.compute.manager [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 918.816083] env[69328]: DEBUG oslo_concurrency.lockutils [None req-45537925-9aff-448a-99db-99d3a6cd0b05 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.100s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 918.816354] env[69328]: DEBUG oslo_concurrency.lockutils [None req-45537925-9aff-448a-99db-99d3a6cd0b05 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 918.818734] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.083s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 918.820705] env[69328]: INFO nova.compute.claims [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 918.832211] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 230c6278-65af-4f5d-b817-0b695086c29d] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 918.858735] env[69328]: INFO nova.scheduler.client.report [None req-45537925-9aff-448a-99db-99d3a6cd0b05 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Deleted allocations for instance 25fb207b-9388-4198-bb48-ab7cebd43375 [ 918.897114] env[69328]: DEBUG oslo_vmware.api [None 
req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273538, 'name': Rename_Task, 'duration_secs': 0.137754} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.897558] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 918.897781] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-33592764-6536-4bdc-925d-2537e4623aaa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.907716] env[69328]: DEBUG oslo_vmware.api [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 918.907716] env[69328]: value = "task-3273541" [ 918.907716] env[69328]: _type = "Task" [ 918.907716] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.926772] env[69328]: DEBUG oslo_vmware.api [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273541, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.931569] env[69328]: DEBUG oslo_vmware.api [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273539, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.154113] env[69328]: DEBUG oslo_vmware.api [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273540, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.163971] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquiring lock "refresh_cache-1413dcfe-3570-4657-b811-81a1acc159d1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.164155] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquired lock "refresh_cache-1413dcfe-3570-4657-b811-81a1acc159d1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 919.164349] env[69328]: DEBUG nova.network.neutron [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 919.265521] env[69328]: DEBUG nova.network.neutron [req-62207c8d-a9eb-4024-8b6d-b23f640d0b72 req-613426da-848c-45d8-ad9e-0d5bd8063cd8 service nova] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Updated VIF entry in instance network info cache for port 6fa9c0fb-f285-4d44-8824-09041fd2f8f6. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 919.266218] env[69328]: DEBUG nova.network.neutron [req-62207c8d-a9eb-4024-8b6d-b23f640d0b72 req-613426da-848c-45d8-ad9e-0d5bd8063cd8 service nova] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Updating instance_info_cache with network_info: [{"id": "6fa9c0fb-f285-4d44-8824-09041fd2f8f6", "address": "fa:16:3e:96:60:e7", "network": {"id": "ce3bec03-01f9-4ac9-80e4-bfb944c0bb66", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-545997027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87581f423dc64e4fb9fe1d51ebc68597", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6fa9c0fb-f2", "ovs_interfaceid": "6fa9c0fb-f285-4d44-8824-09041fd2f8f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.325897] env[69328]: DEBUG nova.compute.utils [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name 
/opt/stack/nova/nova/compute/utils.py:239}} [ 919.331037] env[69328]: DEBUG nova.compute.manager [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 919.331244] env[69328]: DEBUG nova.network.neutron [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 919.337912] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 49a668a7-5967-46a9-823f-7f613d34d152] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 919.371263] env[69328]: DEBUG oslo_concurrency.lockutils [None req-45537925-9aff-448a-99db-99d3a6cd0b05 tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Lock "25fb207b-9388-4198-bb48-ab7cebd43375" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.148s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 919.375467] env[69328]: DEBUG nova.policy [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7fbdc3e734be4369884cfcf483b2678f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4e8bc0d144f44546bd21fb04277c998c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 919.422865] env[69328]: DEBUG oslo_vmware.api [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273541, 'name': PowerOnVM_Task} progress is 71%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.428108] env[69328]: DEBUG oslo_vmware.api [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273539, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.647864] env[69328]: DEBUG oslo_vmware.api [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273540, 'name': PowerOnVM_Task, 'duration_secs': 0.530078} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.648931] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 919.649494] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c64c5fb5-360e-4f16-9959-da559ef77b37 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Updating instance 'fd72bae3-cb72-48d0-a0df-9ea3a770a86c' progress to 100 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 919.697963] env[69328]: DEBUG nova.network.neutron [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Successfully created port: 46bc5af6-cdfc-4468-936e-604560442c91 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 919.703360] env[69328]: DEBUG nova.network.neutron [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 919.769276] env[69328]: DEBUG oslo_concurrency.lockutils [req-62207c8d-a9eb-4024-8b6d-b23f640d0b72 req-613426da-848c-45d8-ad9e-0d5bd8063cd8 service nova] Releasing lock "refresh_cache-3ba646e8-a5c8-4917-a1c4-32b37affb598" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 919.831950] env[69328]: DEBUG nova.compute.manager [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Start building block device mappings for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 919.842486] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 676173ee-8001-48c6-bd28-09130f6dd99a] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 919.855458] env[69328]: DEBUG nova.network.neutron [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Updating instance_info_cache with network_info: [{"id": "7843ca64-fb43-4866-9bd7-f10b7c7e085e", "address": "fa:16:3e:30:b0:de", "network": {"id": "bd41ac10-1850-45a2-8e46-6ebdca3d8e13", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-766393176-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efd0e2d2f9ba4416bd8fd08dad912465", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7843ca64-fb", "ovs_interfaceid": "7843ca64-fb43-4866-9bd7-f10b7c7e085e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.925214] env[69328]: DEBUG oslo_vmware.api [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273541, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.930900] env[69328]: DEBUG oslo_vmware.api [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273539, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.228524] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81e64656-42e9-4e85-abba-52f4d6eaab87 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.235717] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f175563-0811-4fd7-8595-e085e6eed64d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.269697] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-975d3d2f-2747-44de-94b3-318edf8b9e12 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.280012] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9702d9b1-70c0-4a8e-8d18-516fb31ef281 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.293636] env[69328]: DEBUG nova.compute.provider_tree [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 920.345175] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 88f9f0c2-0c55-45bf-a494-8f1ee4922443] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 920.361301] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Releasing lock "refresh_cache-1413dcfe-3570-4657-b811-81a1acc159d1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 920.361301] env[69328]: DEBUG nova.compute.manager [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Instance network_info: |[{"id": "7843ca64-fb43-4866-9bd7-f10b7c7e085e", "address": "fa:16:3e:30:b0:de", "network": {"id": "bd41ac10-1850-45a2-8e46-6ebdca3d8e13", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-766393176-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efd0e2d2f9ba4416bd8fd08dad912465", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7843ca64-fb", "ovs_interfaceid": "7843ca64-fb43-4866-9bd7-f10b7c7e085e", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 920.361301] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:30:b0:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2e614f8e-6b11-4b6b-a421-904bca6acd91', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7843ca64-fb43-4866-9bd7-f10b7c7e085e', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 920.368230] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 920.369110] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 920.369110] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3d22571a-f186-4b1d-b5b5-30b0fbe1bb5a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.391020] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 920.391020] env[69328]: value = "task-3273542" [ 920.391020] env[69328]: _type = "Task" [ 920.391020] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.398588] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273542, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.422129] env[69328]: DEBUG oslo_vmware.api [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273541, 'name': PowerOnVM_Task, 'duration_secs': 1.06226} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.422760] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 920.422957] env[69328]: INFO nova.compute.manager [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Took 8.23 seconds to spawn the instance on the hypervisor. 
[ 920.423255] env[69328]: DEBUG nova.compute.manager [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 920.424033] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ca002e7-d56c-4bda-b582-724e07cf0bbf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.431117] env[69328]: DEBUG oslo_vmware.api [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273539, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.937135} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.431749] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] c751ef77-c3be-46cd-b7eb-fe139bf0998b/c751ef77-c3be-46cd-b7eb-fe139bf0998b.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 920.432009] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 920.432342] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-03caf6ad-c704-46a6-b3ec-e9359cc3ad71 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.441928] env[69328]: DEBUG oslo_vmware.api [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 920.441928] env[69328]: value = "task-3273543" [ 920.441928] env[69328]: _type = "Task" [ 920.441928] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.451757] env[69328]: DEBUG oslo_vmware.api [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273543, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.468774] env[69328]: DEBUG nova.compute.manager [req-5a74d646-1382-41d4-94f5-e2c3d340e356 req-be3e9a4d-f170-417d-b735-68e40105808f service nova] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Received event network-vif-plugged-7843ca64-fb43-4866-9bd7-f10b7c7e085e {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 920.468827] env[69328]: DEBUG oslo_concurrency.lockutils [req-5a74d646-1382-41d4-94f5-e2c3d340e356 req-be3e9a4d-f170-417d-b735-68e40105808f service nova] Acquiring lock "1413dcfe-3570-4657-b811-81a1acc159d1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 920.469105] env[69328]: DEBUG oslo_concurrency.lockutils [req-5a74d646-1382-41d4-94f5-e2c3d340e356 req-be3e9a4d-f170-417d-b735-68e40105808f service nova] Lock "1413dcfe-3570-4657-b811-81a1acc159d1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 920.469324] env[69328]: DEBUG oslo_concurrency.lockutils [req-5a74d646-1382-41d4-94f5-e2c3d340e356 req-be3e9a4d-f170-417d-b735-68e40105808f service nova] Lock "1413dcfe-3570-4657-b811-81a1acc159d1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 920.469483] env[69328]: DEBUG nova.compute.manager [req-5a74d646-1382-41d4-94f5-e2c3d340e356 req-be3e9a4d-f170-417d-b735-68e40105808f service nova] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] No waiting events found dispatching network-vif-plugged-7843ca64-fb43-4866-9bd7-f10b7c7e085e {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 920.469679] env[69328]: WARNING nova.compute.manager [req-5a74d646-1382-41d4-94f5-e2c3d340e356 req-be3e9a4d-f170-417d-b735-68e40105808f service nova] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Received unexpected event network-vif-plugged-7843ca64-fb43-4866-9bd7-f10b7c7e085e for instance with vm_state building and task_state spawning. [ 920.469887] env[69328]: DEBUG nova.compute.manager [req-5a74d646-1382-41d4-94f5-e2c3d340e356 req-be3e9a4d-f170-417d-b735-68e40105808f service nova] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Received event network-changed-7843ca64-fb43-4866-9bd7-f10b7c7e085e {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 920.469983] env[69328]: DEBUG nova.compute.manager [req-5a74d646-1382-41d4-94f5-e2c3d340e356 req-be3e9a4d-f170-417d-b735-68e40105808f service nova] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Refreshing instance network info cache due to event network-changed-7843ca64-fb43-4866-9bd7-f10b7c7e085e. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 920.470187] env[69328]: DEBUG oslo_concurrency.lockutils [req-5a74d646-1382-41d4-94f5-e2c3d340e356 req-be3e9a4d-f170-417d-b735-68e40105808f service nova] Acquiring lock "refresh_cache-1413dcfe-3570-4657-b811-81a1acc159d1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 920.470362] env[69328]: DEBUG oslo_concurrency.lockutils [req-5a74d646-1382-41d4-94f5-e2c3d340e356 req-be3e9a4d-f170-417d-b735-68e40105808f service nova] Acquired lock "refresh_cache-1413dcfe-3570-4657-b811-81a1acc159d1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 920.470569] env[69328]: DEBUG nova.network.neutron [req-5a74d646-1382-41d4-94f5-e2c3d340e356 req-be3e9a4d-f170-417d-b735-68e40105808f service nova] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Refreshing network info cache for port 7843ca64-fb43-4866-9bd7-f10b7c7e085e {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 920.799326] env[69328]: DEBUG nova.scheduler.client.report [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 920.838496] env[69328]: DEBUG oslo_concurrency.lockutils [None req-039d3904-cd51-4387-9b2a-bb7e74f42aee tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquiring lock "d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 920.838762] env[69328]: DEBUG oslo_concurrency.lockutils [None req-039d3904-cd51-4387-9b2a-bb7e74f42aee tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Lock "d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 920.838963] env[69328]: DEBUG oslo_concurrency.lockutils [None req-039d3904-cd51-4387-9b2a-bb7e74f42aee tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquiring lock "d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 920.839168] env[69328]: DEBUG oslo_concurrency.lockutils [None req-039d3904-cd51-4387-9b2a-bb7e74f42aee tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Lock "d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 920.839336] env[69328]: DEBUG oslo_concurrency.lockutils [None req-039d3904-cd51-4387-9b2a-bb7e74f42aee tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Lock "d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 920.842716] env[69328]: INFO nova.compute.manager [None req-039d3904-cd51-4387-9b2a-bb7e74f42aee tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Terminating instance [ 920.845059] env[69328]: DEBUG nova.compute.manager [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 920.849336] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 920.849528] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Cleaning up deleted instances with incomplete migration {{(pid=69328) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11872}} [ 920.870690] env[69328]: DEBUG nova.virt.hardware [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 920.870940] env[69328]: DEBUG nova.virt.hardware [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 920.871107] env[69328]: DEBUG nova.virt.hardware [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 920.871295] 
env[69328]: DEBUG nova.virt.hardware [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 920.871441] env[69328]: DEBUG nova.virt.hardware [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 920.871662] env[69328]: DEBUG nova.virt.hardware [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 920.871893] env[69328]: DEBUG nova.virt.hardware [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 920.872088] env[69328]: DEBUG nova.virt.hardware [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 920.872257] env[69328]: DEBUG nova.virt.hardware [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 920.872420] env[69328]: DEBUG nova.virt.hardware [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 920.872617] env[69328]: DEBUG nova.virt.hardware [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 920.873701] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0552aa78-46df-49e0-a44a-263359a8241c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.881587] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-464447d7-dade-4d6b-a238-e8ae69bee462 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.908586] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273542, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.951600] env[69328]: INFO nova.compute.manager [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Took 49.23 seconds to build instance. [ 920.955148] env[69328]: DEBUG oslo_vmware.api [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273543, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.148547} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.955611] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 920.956445] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-857b4ad8-3a5d-4dd1-a08e-722f3ec5dc7c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.979397] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] c751ef77-c3be-46cd-b7eb-fe139bf0998b/c751ef77-c3be-46cd-b7eb-fe139bf0998b.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 920.982888] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5e2a6560-8524-4c30-8d90-ef3dff2fb9ca {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.003184] env[69328]: DEBUG oslo_vmware.api [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 921.003184] env[69328]: value = "task-3273544" [ 921.003184] env[69328]: _type = "Task" [ 921.003184] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.011772] env[69328]: DEBUG oslo_vmware.api [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273544, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.252354] env[69328]: DEBUG nova.network.neutron [req-5a74d646-1382-41d4-94f5-e2c3d340e356 req-be3e9a4d-f170-417d-b735-68e40105808f service nova] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Updated VIF entry in instance network info cache for port 7843ca64-fb43-4866-9bd7-f10b7c7e085e. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 921.252787] env[69328]: DEBUG nova.network.neutron [req-5a74d646-1382-41d4-94f5-e2c3d340e356 req-be3e9a4d-f170-417d-b735-68e40105808f service nova] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Updating instance_info_cache with network_info: [{"id": "7843ca64-fb43-4866-9bd7-f10b7c7e085e", "address": "fa:16:3e:30:b0:de", "network": {"id": "bd41ac10-1850-45a2-8e46-6ebdca3d8e13", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-766393176-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efd0e2d2f9ba4416bd8fd08dad912465", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7843ca64-fb", "ovs_interfaceid": "7843ca64-fb43-4866-9bd7-f10b7c7e085e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.274742] env[69328]: DEBUG nova.network.neutron [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Successfully updated port: 46bc5af6-cdfc-4468-936e-604560442c91 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 921.305097] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.486s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 921.305603] env[69328]: DEBUG nova.compute.manager [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 921.307987] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3b4b7cd4-301e-4a1a-86eb-8920abfdcbd3 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.303s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 921.308220] env[69328]: DEBUG nova.objects.instance [None req-3b4b7cd4-301e-4a1a-86eb-8920abfdcbd3 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Lazy-loading 'resources' on Instance uuid 5a45bd6a-b063-4104-a85a-d78a4bb9452e {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 921.350749] env[69328]: DEBUG nova.compute.manager [None req-039d3904-cd51-4387-9b2a-bb7e74f42aee tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 921.351176] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-039d3904-cd51-4387-9b2a-bb7e74f42aee tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 921.353008] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3db2a0a4-0f2f-4980-858b-826a499ff407 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.356018] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 921.362933] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-039d3904-cd51-4387-9b2a-bb7e74f42aee tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 921.363892] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-18d29d39-5155-485a-816e-11d21bd9748d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.371202] env[69328]: DEBUG oslo_vmware.api [None req-039d3904-cd51-4387-9b2a-bb7e74f42aee tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Waiting for the task: (returnval){ [ 921.371202] env[69328]: value = "task-3273545" [ 921.371202] env[69328]: _type = "Task" [ 921.371202] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.379419] env[69328]: DEBUG oslo_vmware.api [None req-039d3904-cd51-4387-9b2a-bb7e74f42aee tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273545, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.403886] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0790747f-001b-4009-889e-af44a3ab0781 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "20f750d7-1914-49bb-802f-464a30ffcf3a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 921.409589] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273542, 'name': CreateVM_Task, 'duration_secs': 0.702441} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.409775] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 921.410525] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.410721] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 921.411092] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 921.411389] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e65316e6-0ef4-4332-870e-2ab390619866 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.416110] env[69328]: DEBUG oslo_vmware.api [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 921.416110] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b7d470-1b13-3b19-e564-4b567e019223" [ 921.416110] env[69328]: _type = "Task" [ 921.416110] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.425075] env[69328]: DEBUG oslo_vmware.api [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b7d470-1b13-3b19-e564-4b567e019223, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.456426] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1cc51d3e-2541-4876-907c-8dda0e5079c2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "20f750d7-1914-49bb-802f-464a30ffcf3a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.739s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 921.456768] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0790747f-001b-4009-889e-af44a3ab0781 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "20f750d7-1914-49bb-802f-464a30ffcf3a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.053s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 921.457043] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0790747f-001b-4009-889e-af44a3ab0781 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "20f750d7-1914-49bb-802f-464a30ffcf3a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 921.457298] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0790747f-001b-4009-889e-af44a3ab0781 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "20f750d7-1914-49bb-802f-464a30ffcf3a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 921.457665] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0790747f-001b-4009-889e-af44a3ab0781 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "20f750d7-1914-49bb-802f-464a30ffcf3a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 921.461102] env[69328]: INFO nova.compute.manager [None req-0790747f-001b-4009-889e-af44a3ab0781 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Terminating instance [ 921.515955] env[69328]: DEBUG oslo_vmware.api [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273544, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.756738] env[69328]: DEBUG oslo_concurrency.lockutils [req-5a74d646-1382-41d4-94f5-e2c3d340e356 req-be3e9a4d-f170-417d-b735-68e40105808f service nova] Releasing lock "refresh_cache-1413dcfe-3570-4657-b811-81a1acc159d1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 921.777511] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Acquiring lock "refresh_cache-73d5b248-3c3e-4e38-8d9c-1f9bfdb38494" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.777635] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Acquired lock "refresh_cache-73d5b248-3c3e-4e38-8d9c-1f9bfdb38494" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 921.777861] env[69328]: DEBUG nova.network.neutron [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 921.814514] env[69328]: DEBUG nova.compute.utils [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 921.816407] env[69328]: DEBUG nova.compute.manager [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 921.816407] env[69328]: DEBUG nova.network.neutron [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 921.860365] env[69328]: DEBUG nova.policy [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7a1e946caadd413985d7965125000b07', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f71a072b33154efe9636b50e25f93381', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 921.881133] env[69328]: DEBUG oslo_vmware.api [None req-039d3904-cd51-4387-9b2a-bb7e74f42aee tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273545, 'name': PowerOffVM_Task, 'duration_secs': 0.240415} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.883562] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-039d3904-cd51-4387-9b2a-bb7e74f42aee tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 921.883749] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-039d3904-cd51-4387-9b2a-bb7e74f42aee tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 921.884164] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-62f38019-0466-4e22-8faf-edd604dd1c81 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.926519] env[69328]: DEBUG oslo_vmware.api [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b7d470-1b13-3b19-e564-4b567e019223, 'name': SearchDatastore_Task, 'duration_secs': 0.012094} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.929058] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 921.929304] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 921.929543] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.929689] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 921.929862] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 921.930313] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7544016d-5473-4ce0-97e9-faf551e29acd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.938732] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 921.938920] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 921.939658] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1519668d-cf95-4172-ab3c-25c69319a93a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.947402] env[69328]: DEBUG oslo_vmware.api [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 921.947402] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]526b682d-7753-9c43-0275-5a2ae7d2b5f1" [ 921.947402] env[69328]: _type = "Task" [ 921.947402] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.955459] env[69328]: DEBUG oslo_vmware.api [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]526b682d-7753-9c43-0275-5a2ae7d2b5f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.959048] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-039d3904-cd51-4387-9b2a-bb7e74f42aee tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 921.959257] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-039d3904-cd51-4387-9b2a-bb7e74f42aee tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 921.959432] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-039d3904-cd51-4387-9b2a-bb7e74f42aee tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Deleting the datastore file [datastore1] d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 921.960050] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-85da57fc-b616-4219-8591-38db9cb4a706 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.966404] env[69328]: DEBUG nova.compute.manager [None req-0790747f-001b-4009-889e-af44a3ab0781 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 921.966604] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0790747f-001b-4009-889e-af44a3ab0781 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 921.966922] env[69328]: DEBUG oslo_vmware.api [None req-039d3904-cd51-4387-9b2a-bb7e74f42aee tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Waiting for the task: (returnval){ [ 921.966922] env[69328]: value = "task-3273547" [ 921.966922] env[69328]: _type = "Task" [ 921.966922] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.967727] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abcbcbe7-04d8-4340-b430-f51a5efc92dd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.980591] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-0790747f-001b-4009-889e-af44a3ab0781 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 921.983224] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-45d7fedc-03cc-4514-8c05-6723d197724d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.984618] env[69328]: DEBUG oslo_vmware.api [None req-039d3904-cd51-4387-9b2a-bb7e74f42aee tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273547, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.988572] env[69328]: DEBUG oslo_vmware.api [None req-0790747f-001b-4009-889e-af44a3ab0781 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 921.988572] env[69328]: value = "task-3273548" [ 921.988572] env[69328]: _type = "Task" [ 921.988572] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.996520] env[69328]: DEBUG oslo_vmware.api [None req-0790747f-001b-4009-889e-af44a3ab0781 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273548, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.014149] env[69328]: DEBUG oslo_vmware.api [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273544, 'name': ReconfigVM_Task, 'duration_secs': 0.558089} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.014421] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Reconfigured VM instance instance-00000048 to attach disk [datastore1] c751ef77-c3be-46cd-b7eb-fe139bf0998b/c751ef77-c3be-46cd-b7eb-fe139bf0998b.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 922.015184] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7ca20ef8-f070-414c-8b06-62324b76a5d8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.021320] env[69328]: DEBUG oslo_vmware.api [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 922.021320] env[69328]: value = "task-3273549" [ 922.021320] env[69328]: _type = "Task" [ 922.021320] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.032622] env[69328]: DEBUG oslo_vmware.api [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273549, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.147220] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b6e80060-f62c-47bc-822b-0bdeae508a3d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "fd72bae3-cb72-48d0-a0df-9ea3a770a86c" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 922.147458] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b6e80060-f62c-47bc-822b-0bdeae508a3d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "fd72bae3-cb72-48d0-a0df-9ea3a770a86c" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 922.147654] env[69328]: DEBUG nova.compute.manager [None req-b6e80060-f62c-47bc-822b-0bdeae508a3d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Going to confirm migration 3 {{(pid=69328) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 922.181142] env[69328]: DEBUG nova.network.neutron [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Successfully created port: 43596910-7f9b-47c0-972f-1dd3d779373c {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 922.190704] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f96df16-c8b9-4133-a44e-8b615b1568ac 
{{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.198102] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29189ed6-71de-40b6-9b0a-62b6ef44c792 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.236110] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17817066-6860-4897-a5cb-da64e52afaa1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.245146] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-901bd9e2-6108-43de-ab83-b677f2b28773 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.258522] env[69328]: DEBUG nova.compute.provider_tree [None req-3b4b7cd4-301e-4a1a-86eb-8920abfdcbd3 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 922.308063] env[69328]: DEBUG nova.network.neutron [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 922.318642] env[69328]: DEBUG nova.compute.manager [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Start building block device mappings for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 922.432995] env[69328]: DEBUG nova.network.neutron [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Successfully created port: 958e374d-7706-40d9-aac0-ee00bd3140f0 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 922.437552] env[69328]: DEBUG nova.network.neutron [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Updating instance_info_cache with network_info: [{"id": "46bc5af6-cdfc-4468-936e-604560442c91", "address": "fa:16:3e:06:8f:6a", "network": {"id": "edfe6995-ac16-4086-9cb4-efcda93045c6", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-703565163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e8bc0d144f44546bd21fb04277c998c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f", "external-id": "nsx-vlan-transportzone-584", "segmentation_id": 584, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46bc5af6-cd", "ovs_interfaceid": "46bc5af6-cdfc-4468-936e-604560442c91", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 922.458793] env[69328]: DEBUG oslo_vmware.api [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]526b682d-7753-9c43-0275-5a2ae7d2b5f1, 'name': SearchDatastore_Task, 'duration_secs': 0.023509} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.459791] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-716c7aae-aa60-43e7-b505-7617ce7cf49b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.464996] env[69328]: DEBUG oslo_vmware.api [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 922.464996] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5222967a-a305-8242-a789-a347019c7798" [ 922.464996] env[69328]: _type = "Task" [ 922.464996] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.472985] env[69328]: DEBUG oslo_vmware.api [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5222967a-a305-8242-a789-a347019c7798, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.480635] env[69328]: DEBUG oslo_vmware.api [None req-039d3904-cd51-4387-9b2a-bb7e74f42aee tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Task: {'id': task-3273547, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.215328} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.481016] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-039d3904-cd51-4387-9b2a-bb7e74f42aee tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 922.481173] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-039d3904-cd51-4387-9b2a-bb7e74f42aee tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 922.481374] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-039d3904-cd51-4387-9b2a-bb7e74f42aee tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 922.481572] env[69328]: INFO nova.compute.manager [None req-039d3904-cd51-4387-9b2a-bb7e74f42aee tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Took 1.13 seconds to destroy the instance on the hypervisor. [ 922.481820] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-039d3904-cd51-4387-9b2a-bb7e74f42aee tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 922.482013] env[69328]: DEBUG nova.compute.manager [-] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 922.482109] env[69328]: DEBUG nova.network.neutron [-] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 922.499101] env[69328]: DEBUG oslo_vmware.api [None req-0790747f-001b-4009-889e-af44a3ab0781 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273548, 'name': PowerOffVM_Task, 'duration_secs': 0.176422} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.500440] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-0790747f-001b-4009-889e-af44a3ab0781 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 922.500636] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0790747f-001b-4009-889e-af44a3ab0781 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 922.502126] env[69328]: DEBUG nova.compute.manager [req-d35c8456-f53e-4d31-872e-3e11edc5967d req-55327dfb-3f02-457d-80d3-0cca20ee27b4 service nova] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Received event network-vif-plugged-46bc5af6-cdfc-4468-936e-604560442c91 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 922.502522] env[69328]: DEBUG oslo_concurrency.lockutils [req-d35c8456-f53e-4d31-872e-3e11edc5967d req-55327dfb-3f02-457d-80d3-0cca20ee27b4 service nova] Acquiring lock "73d5b248-3c3e-4e38-8d9c-1f9bfdb38494-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 922.502522] env[69328]: DEBUG oslo_concurrency.lockutils [req-d35c8456-f53e-4d31-872e-3e11edc5967d req-55327dfb-3f02-457d-80d3-0cca20ee27b4 service nova] Lock "73d5b248-3c3e-4e38-8d9c-1f9bfdb38494-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 922.502804] env[69328]: DEBUG oslo_concurrency.lockutils [req-d35c8456-f53e-4d31-872e-3e11edc5967d req-55327dfb-3f02-457d-80d3-0cca20ee27b4 service nova] Lock "73d5b248-3c3e-4e38-8d9c-1f9bfdb38494-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 922.502871] env[69328]: DEBUG nova.compute.manager [req-d35c8456-f53e-4d31-872e-3e11edc5967d req-55327dfb-3f02-457d-80d3-0cca20ee27b4 service nova] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] No waiting events found dispatching network-vif-plugged-46bc5af6-cdfc-4468-936e-604560442c91 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 922.503016] env[69328]: WARNING nova.compute.manager [req-d35c8456-f53e-4d31-872e-3e11edc5967d req-55327dfb-3f02-457d-80d3-0cca20ee27b4 service nova] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Received unexpected event network-vif-plugged-46bc5af6-cdfc-4468-936e-604560442c91 for instance with vm_state building and task_state spawning. 
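The entries above show the compute manager receiving Neutron external events (network-vif-plugged / network-changed) for port 46bc5af6-cdfc-4468-936e-604560442c91 while the instance is still building: it takes the per-instance "-events" lock, finds no registered waiter, and logs the event as unexpected. Below is a minimal, standard-library-only sketch of that wait/dispatch pattern; the class and method names are hypothetical and this is not Nova's actual implementation.

import threading
from collections import defaultdict

class InstanceEventWaiters:
    """Toy model of waiting for external events such as
    network-vif-plugged-<port_id>. Hypothetical sketch, not Nova code."""

    def __init__(self):
        self._lock = threading.Lock()                 # analogue of the "-events" lock
        self._waiters = defaultdict(threading.Event)  # (instance_uuid, event) -> Event

    def prepare(self, instance_uuid, event_name):
        # A spawning thread registers interest before plugging the VIF,
        # then calls .wait(timeout=...) on the returned Event.
        with self._lock:
            return self._waiters[(instance_uuid, event_name)]

    def dispatch(self, instance_uuid, event_name):
        # Called when Neutron reports the event; wakes the waiter if any.
        with self._lock:
            waiter = self._waiters.pop((instance_uuid, event_name), None)
        if waiter is None:
            # Corresponds to the "No waiting events found dispatching ..." /
            # "Received unexpected event ..." pair in the log above.
            print(f"unexpected event {event_name} for {instance_uuid}")
            return False
        waiter.set()
        return True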
[ 922.503179] env[69328]: DEBUG nova.compute.manager [req-d35c8456-f53e-4d31-872e-3e11edc5967d req-55327dfb-3f02-457d-80d3-0cca20ee27b4 service nova] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Received event network-changed-46bc5af6-cdfc-4468-936e-604560442c91 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 922.503328] env[69328]: DEBUG nova.compute.manager [req-d35c8456-f53e-4d31-872e-3e11edc5967d req-55327dfb-3f02-457d-80d3-0cca20ee27b4 service nova] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Refreshing instance network info cache due to event network-changed-46bc5af6-cdfc-4468-936e-604560442c91. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 922.503486] env[69328]: DEBUG oslo_concurrency.lockutils [req-d35c8456-f53e-4d31-872e-3e11edc5967d req-55327dfb-3f02-457d-80d3-0cca20ee27b4 service nova] Acquiring lock "refresh_cache-73d5b248-3c3e-4e38-8d9c-1f9bfdb38494" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.503680] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a465ef0c-455b-4e09-af76-ccc70f8e3ae7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.531709] env[69328]: DEBUG oslo_vmware.api [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273549, 'name': Rename_Task, 'duration_secs': 0.172138} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.532169] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 922.532432] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-67e64503-8a60-4b01-a915-7b36a2b01347 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.538697] env[69328]: DEBUG oslo_vmware.api [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 922.538697] env[69328]: value = "task-3273551" [ 922.538697] env[69328]: _type = "Task" [ 922.538697] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.547908] env[69328]: DEBUG oslo_vmware.api [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273551, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.571372] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0790747f-001b-4009-889e-af44a3ab0781 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 922.571723] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0790747f-001b-4009-889e-af44a3ab0781 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 922.571956] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-0790747f-001b-4009-889e-af44a3ab0781 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Deleting the datastore file [datastore1] 20f750d7-1914-49bb-802f-464a30ffcf3a {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 922.572727] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0e061a1d-3b2b-4c5b-9d56-ec026e2a57ae {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.581237] env[69328]: DEBUG oslo_vmware.api [None req-0790747f-001b-4009-889e-af44a3ab0781 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 922.581237] env[69328]: value = "task-3273552" [ 922.581237] env[69328]: _type = "Task" [ 922.581237] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.589815] env[69328]: DEBUG oslo_vmware.api [None req-0790747f-001b-4009-889e-af44a3ab0781 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273552, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.714490] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b6e80060-f62c-47bc-822b-0bdeae508a3d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "refresh_cache-fd72bae3-cb72-48d0-a0df-9ea3a770a86c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.714695] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b6e80060-f62c-47bc-822b-0bdeae508a3d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquired lock "refresh_cache-fd72bae3-cb72-48d0-a0df-9ea3a770a86c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 922.714890] env[69328]: DEBUG nova.network.neutron [None req-b6e80060-f62c-47bc-822b-0bdeae508a3d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 922.715577] env[69328]: DEBUG nova.objects.instance [None req-b6e80060-f62c-47bc-822b-0bdeae508a3d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lazy-loading 'info_cache' on Instance uuid fd72bae3-cb72-48d0-a0df-9ea3a770a86c {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 922.761460] env[69328]: DEBUG nova.scheduler.client.report [None req-3b4b7cd4-301e-4a1a-86eb-8920abfdcbd3 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 922.887125] env[69328]: DEBUG nova.compute.manager [req-b715e90c-5c9c-42de-8bc2-91f63677bfe5 req-349fa83d-946b-41c3-a919-46e97832050a service nova] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Received event network-vif-deleted-cbf38f9d-1507-45bb-9684-bf804c86b93b {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 922.887394] env[69328]: INFO nova.compute.manager [req-b715e90c-5c9c-42de-8bc2-91f63677bfe5 req-349fa83d-946b-41c3-a919-46e97832050a service nova] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Neutron deleted interface cbf38f9d-1507-45bb-9684-bf804c86b93b; detaching it from the instance and deleting it from the info cache [ 922.887535] env[69328]: DEBUG nova.network.neutron [req-b715e90c-5c9c-42de-8bc2-91f63677bfe5 req-349fa83d-946b-41c3-a919-46e97832050a service nova] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 922.941821] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 
tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Releasing lock "refresh_cache-73d5b248-3c3e-4e38-8d9c-1f9bfdb38494" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 922.942205] env[69328]: DEBUG nova.compute.manager [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Instance network_info: |[{"id": "46bc5af6-cdfc-4468-936e-604560442c91", "address": "fa:16:3e:06:8f:6a", "network": {"id": "edfe6995-ac16-4086-9cb4-efcda93045c6", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-703565163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e8bc0d144f44546bd21fb04277c998c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f", "external-id": "nsx-vlan-transportzone-584", "segmentation_id": 584, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46bc5af6-cd", "ovs_interfaceid": "46bc5af6-cdfc-4468-936e-604560442c91", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 922.942489] env[69328]: DEBUG oslo_concurrency.lockutils [req-d35c8456-f53e-4d31-872e-3e11edc5967d req-55327dfb-3f02-457d-80d3-0cca20ee27b4 service nova] Acquired lock "refresh_cache-73d5b248-3c3e-4e38-8d9c-1f9bfdb38494" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 922.942701] env[69328]: DEBUG nova.network.neutron [req-d35c8456-f53e-4d31-872e-3e11edc5967d req-55327dfb-3f02-457d-80d3-0cca20ee27b4 service nova] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Refreshing network info cache for port 46bc5af6-cdfc-4468-936e-604560442c91 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 922.943990] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:06:8f:6a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '46bc5af6-cdfc-4468-936e-604560442c91', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 922.952062] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 922.953178] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 922.953455] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5193cb0a-cf78-4fe3-ba4d-055605f7b84c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.983146] env[69328]: DEBUG oslo_vmware.api [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5222967a-a305-8242-a789-a347019c7798, 'name': SearchDatastore_Task, 'duration_secs': 0.034815} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.984311] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 922.984577] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 1413dcfe-3570-4657-b811-81a1acc159d1/1413dcfe-3570-4657-b811-81a1acc159d1.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 922.984808] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 922.984808] env[69328]: value = "task-3273553" [ 922.984808] env[69328]: _type = "Task" [ 922.984808] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.984983] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e9267f31-9e3b-44f5-b54e-7cb372f0b979 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.994493] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273553, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.995691] env[69328]: DEBUG oslo_vmware.api [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 922.995691] env[69328]: value = "task-3273554" [ 922.995691] env[69328]: _type = "Task" [ 922.995691] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.003870] env[69328]: DEBUG oslo_vmware.api [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273554, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.049398] env[69328]: DEBUG oslo_vmware.api [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273551, 'name': PowerOnVM_Task, 'duration_secs': 0.462312} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.049702] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 923.049945] env[69328]: INFO nova.compute.manager [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Took 8.34 seconds to spawn the instance on the hypervisor. [ 923.050182] env[69328]: DEBUG nova.compute.manager [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 923.051076] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85f7c54b-b10b-4898-9f66-14394e744bc4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.093606] env[69328]: DEBUG oslo_vmware.api [None req-0790747f-001b-4009-889e-af44a3ab0781 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273552, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.234097} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.093911] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-0790747f-001b-4009-889e-af44a3ab0781 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 923.094113] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0790747f-001b-4009-889e-af44a3ab0781 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 923.094302] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0790747f-001b-4009-889e-af44a3ab0781 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 923.094470] env[69328]: INFO nova.compute.manager [None req-0790747f-001b-4009-889e-af44a3ab0781 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Took 1.13 seconds to destroy the instance on the hypervisor. [ 923.094711] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0790747f-001b-4009-889e-af44a3ab0781 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 923.094898] env[69328]: DEBUG nova.compute.manager [-] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 923.094990] env[69328]: DEBUG nova.network.neutron [-] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 923.268764] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3b4b7cd4-301e-4a1a-86eb-8920abfdcbd3 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.958s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 923.274091] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.969s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 923.275674] env[69328]: INFO nova.compute.claims [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 923.297702] env[69328]: INFO nova.scheduler.client.report [None 
req-3b4b7cd4-301e-4a1a-86eb-8920abfdcbd3 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Deleted allocations for instance 5a45bd6a-b063-4104-a85a-d78a4bb9452e [ 923.329614] env[69328]: DEBUG nova.compute.manager [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 923.343437] env[69328]: DEBUG nova.network.neutron [-] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 923.366486] env[69328]: DEBUG nova.virt.hardware [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 923.366750] env[69328]: DEBUG nova.virt.hardware [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 923.366907] env[69328]: DEBUG nova.virt.hardware [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 923.367092] env[69328]: DEBUG nova.virt.hardware [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 923.367244] env[69328]: DEBUG nova.virt.hardware [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 923.367401] env[69328]: DEBUG nova.virt.hardware [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 923.367584] env[69328]: DEBUG nova.virt.hardware [None 
req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 923.367738] env[69328]: DEBUG nova.virt.hardware [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 923.367911] env[69328]: DEBUG nova.virt.hardware [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 923.368104] env[69328]: DEBUG nova.virt.hardware [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 923.368298] env[69328]: DEBUG nova.virt.hardware [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 923.369552] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e09237e-dfff-4d34-8c39-e64b7126cfbd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.380708] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c322f12-adff-4861-b5c5-3e7fe20cecda {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.398495] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2a2249cb-5639-4439-ab58-c9f845164593 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.407784] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afd9ec1f-680f-43d9-ac48-1f8ddf116e9b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.443873] env[69328]: DEBUG nova.compute.manager [req-b715e90c-5c9c-42de-8bc2-91f63677bfe5 req-349fa83d-946b-41c3-a919-46e97832050a service nova] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Detach interface failed, port_id=cbf38f9d-1507-45bb-9684-bf804c86b93b, reason: Instance d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8 could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 923.499774] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273553, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.509123] env[69328]: DEBUG oslo_vmware.api [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273554, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.570033] env[69328]: INFO nova.compute.manager [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Took 45.07 seconds to build instance. [ 923.664690] env[69328]: DEBUG nova.network.neutron [req-d35c8456-f53e-4d31-872e-3e11edc5967d req-55327dfb-3f02-457d-80d3-0cca20ee27b4 service nova] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Updated VIF entry in instance network info cache for port 46bc5af6-cdfc-4468-936e-604560442c91. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 923.665136] env[69328]: DEBUG nova.network.neutron [req-d35c8456-f53e-4d31-872e-3e11edc5967d req-55327dfb-3f02-457d-80d3-0cca20ee27b4 service nova] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Updating instance_info_cache with network_info: [{"id": "46bc5af6-cdfc-4468-936e-604560442c91", "address": "fa:16:3e:06:8f:6a", "network": {"id": "edfe6995-ac16-4086-9cb4-efcda93045c6", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-703565163-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e8bc0d144f44546bd21fb04277c998c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f", "external-id": "nsx-vlan-transportzone-584", "segmentation_id": 584, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46bc5af6-cd", "ovs_interfaceid": "46bc5af6-cdfc-4468-936e-604560442c91", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 923.807667] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3b4b7cd4-301e-4a1a-86eb-8920abfdcbd3 tempest-InstanceActionsTestJSON-2103611164 tempest-InstanceActionsTestJSON-2103611164-project-member] Lock "5a45bd6a-b063-4104-a85a-d78a4bb9452e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.251s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 923.846393] env[69328]: INFO nova.compute.manager [-] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Took 1.36 seconds to deallocate network for instance. 
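The cache update above carries the full network_info structure for port 46bc5af6-cdfc-4468-936e-604560442c91: VIF id, MAC, bridge, subnets with fixed IPs, MTU and binding details. The helper below is a reader-side convenience for condensing such a structure, written against the plain JSON-like shape printed in the log; it is an illustrative sketch, not a Nova API.

def summarize_network_info(network_info):
    # Condense a logged network_info list into port/MAC/IP basics.
    rows = []
    for vif in network_info:
        fixed_ips = [
            ip["address"]
            for subnet in vif["network"]["subnets"]
            for ip in subnet["ips"]
            if ip["type"] == "fixed"
        ]
        rows.append({
            "port_id": vif["id"],
            "mac": vif["address"],
            "bridge": vif["network"]["bridge"],
            "mtu": vif["network"]["meta"].get("mtu"),
            "fixed_ips": fixed_ips,
        })
    return rows

# For the entry above this yields:
# [{'port_id': '46bc5af6-cdfc-4468-936e-604560442c91', 'mac': 'fa:16:3e:06:8f:6a',
#   'bridge': 'br-int', 'mtu': 8950, 'fixed_ips': ['192.168.128.7']}]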
[ 923.889056] env[69328]: DEBUG nova.network.neutron [-] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 923.944968] env[69328]: DEBUG nova.network.neutron [None req-b6e80060-f62c-47bc-822b-0bdeae508a3d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Updating instance_info_cache with network_info: [{"id": "eebd5d04-278d-4e22-9e5d-df5ae37877cf", "address": "fa:16:3e:d8:d9:39", "network": {"id": "e2ab6957-2c9d-4b95-91bd-5a62d9a284ba", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-967470666-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75d5853e3c724d02bacfa75173e38ab3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeebd5d04-27", "ovs_interfaceid": "eebd5d04-278d-4e22-9e5d-df5ae37877cf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 924.002569] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273553, 'name': CreateVM_Task, 'duration_secs': 0.802325} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.002569] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 924.006188] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.006362] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 924.009782] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 924.009782] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64faa67f-94f3-42da-899d-07c96911c8b9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.024340] env[69328]: DEBUG oslo_vmware.api [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273554, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.612961} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.024646] env[69328]: DEBUG oslo_vmware.api [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for the task: (returnval){ [ 924.024646] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52fceb74-906f-661f-6c2d-cda9b2350659" [ 924.024646] env[69328]: _type = "Task" [ 924.024646] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.025252] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 1413dcfe-3570-4657-b811-81a1acc159d1/1413dcfe-3570-4657-b811-81a1acc159d1.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 924.025475] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 924.025770] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-34b9c7c3-4f00-45b0-9cbd-85bd8561d4c4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.038832] env[69328]: DEBUG oslo_vmware.api [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52fceb74-906f-661f-6c2d-cda9b2350659, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.040304] env[69328]: DEBUG oslo_vmware.api [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 924.040304] env[69328]: value = "task-3273555" [ 924.040304] env[69328]: _type = "Task" [ 924.040304] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.050689] env[69328]: DEBUG oslo_vmware.api [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273555, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.072687] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7c01c3dc-88f0-4e5b-98ef-bb8539f60fb7 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lock "c751ef77-c3be-46cd-b7eb-fe139bf0998b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.583s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 924.168026] env[69328]: DEBUG oslo_concurrency.lockutils [req-d35c8456-f53e-4d31-872e-3e11edc5967d req-55327dfb-3f02-457d-80d3-0cca20ee27b4 service nova] Releasing lock "refresh_cache-73d5b248-3c3e-4e38-8d9c-1f9bfdb38494" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 924.310650] env[69328]: DEBUG nova.network.neutron [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Successfully updated port: 43596910-7f9b-47c0-972f-1dd3d779373c {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 924.355447] env[69328]: DEBUG oslo_concurrency.lockutils [None req-039d3904-cd51-4387-9b2a-bb7e74f42aee tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 924.395299] env[69328]: INFO nova.compute.manager [-] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Took 1.30 seconds to deallocate network for instance. [ 924.452657] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b6e80060-f62c-47bc-822b-0bdeae508a3d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Releasing lock "refresh_cache-fd72bae3-cb72-48d0-a0df-9ea3a770a86c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 924.452657] env[69328]: DEBUG nova.objects.instance [None req-b6e80060-f62c-47bc-822b-0bdeae508a3d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lazy-loading 'migration_context' on Instance uuid fd72bae3-cb72-48d0-a0df-9ea3a770a86c {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 924.541031] env[69328]: DEBUG oslo_vmware.api [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52fceb74-906f-661f-6c2d-cda9b2350659, 'name': SearchDatastore_Task, 'duration_secs': 0.045688} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.546448] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 924.546748] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 924.547046] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.547230] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 924.547421] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 924.548624] env[69328]: DEBUG nova.compute.manager [req-c2be8b26-498f-4798-afae-2604c93bf72b req-c386e3ea-3c15-440b-93cb-2307c329b538 service nova] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Received event network-vif-plugged-43596910-7f9b-47c0-972f-1dd3d779373c {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 924.548818] env[69328]: DEBUG oslo_concurrency.lockutils [req-c2be8b26-498f-4798-afae-2604c93bf72b req-c386e3ea-3c15-440b-93cb-2307c329b538 service nova] Acquiring lock "1f568ba1-8591-499b-b1ee-da16e26f81fc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 924.549061] env[69328]: DEBUG oslo_concurrency.lockutils [req-c2be8b26-498f-4798-afae-2604c93bf72b req-c386e3ea-3c15-440b-93cb-2307c329b538 service nova] Lock "1f568ba1-8591-499b-b1ee-da16e26f81fc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 924.549251] env[69328]: DEBUG oslo_concurrency.lockutils [req-c2be8b26-498f-4798-afae-2604c93bf72b req-c386e3ea-3c15-440b-93cb-2307c329b538 service nova] Lock 
"1f568ba1-8591-499b-b1ee-da16e26f81fc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 924.549454] env[69328]: DEBUG nova.compute.manager [req-c2be8b26-498f-4798-afae-2604c93bf72b req-c386e3ea-3c15-440b-93cb-2307c329b538 service nova] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] No waiting events found dispatching network-vif-plugged-43596910-7f9b-47c0-972f-1dd3d779373c {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 924.549701] env[69328]: WARNING nova.compute.manager [req-c2be8b26-498f-4798-afae-2604c93bf72b req-c386e3ea-3c15-440b-93cb-2307c329b538 service nova] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Received unexpected event network-vif-plugged-43596910-7f9b-47c0-972f-1dd3d779373c for instance with vm_state building and task_state spawning. [ 924.549976] env[69328]: DEBUG nova.compute.manager [req-c2be8b26-498f-4798-afae-2604c93bf72b req-c386e3ea-3c15-440b-93cb-2307c329b538 service nova] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Received event network-changed-43596910-7f9b-47c0-972f-1dd3d779373c {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 924.550199] env[69328]: DEBUG nova.compute.manager [req-c2be8b26-498f-4798-afae-2604c93bf72b req-c386e3ea-3c15-440b-93cb-2307c329b538 service nova] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Refreshing instance network info cache due to event network-changed-43596910-7f9b-47c0-972f-1dd3d779373c. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 924.550416] env[69328]: DEBUG oslo_concurrency.lockutils [req-c2be8b26-498f-4798-afae-2604c93bf72b req-c386e3ea-3c15-440b-93cb-2307c329b538 service nova] Acquiring lock "refresh_cache-1f568ba1-8591-499b-b1ee-da16e26f81fc" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.550550] env[69328]: DEBUG oslo_concurrency.lockutils [req-c2be8b26-498f-4798-afae-2604c93bf72b req-c386e3ea-3c15-440b-93cb-2307c329b538 service nova] Acquired lock "refresh_cache-1f568ba1-8591-499b-b1ee-da16e26f81fc" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 924.550727] env[69328]: DEBUG nova.network.neutron [req-c2be8b26-498f-4798-afae-2604c93bf72b req-c386e3ea-3c15-440b-93cb-2307c329b538 service nova] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Refreshing network info cache for port 43596910-7f9b-47c0-972f-1dd3d779373c {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 924.554634] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-97495f00-e35c-440b-84d8-7b90657f2e1d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.563960] env[69328]: DEBUG oslo_vmware.api [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273555, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091499} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.564274] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 924.565173] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd60890f-3a54-411a-99f6-cada95553f84 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.571707] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 924.571707] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 924.573260] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-549c7a35-dadd-45a1-a16f-a82394b7db96 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.603162] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] 1413dcfe-3570-4657-b811-81a1acc159d1/1413dcfe-3570-4657-b811-81a1acc159d1.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 924.606984] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5e4acbe1-b8a3-449a-96d7-7b5335a45dba {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.625391] env[69328]: DEBUG oslo_vmware.api [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for the task: (returnval){ [ 924.625391] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]528f36e2-b431-5132-a179-f7c920849b6a" [ 924.625391] env[69328]: _type = "Task" [ 924.625391] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.635672] env[69328]: DEBUG oslo_vmware.api [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]528f36e2-b431-5132-a179-f7c920849b6a, 'name': SearchDatastore_Task, 'duration_secs': 0.014722} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.639323] env[69328]: DEBUG oslo_vmware.api [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 924.639323] env[69328]: value = "task-3273556" [ 924.639323] env[69328]: _type = "Task" [ 924.639323] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.641095] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12132df6-d83a-4a75-bde6-314ee25170f7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.652787] env[69328]: DEBUG oslo_vmware.api [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273556, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.653126] env[69328]: DEBUG oslo_vmware.api [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for the task: (returnval){ [ 924.653126] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b3ebaa-c4a3-73b2-861f-7522b3ffc947" [ 924.653126] env[69328]: _type = "Task" [ 924.653126] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.666100] env[69328]: DEBUG oslo_vmware.api [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b3ebaa-c4a3-73b2-861f-7522b3ffc947, 'name': SearchDatastore_Task, 'duration_secs': 0.011961} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.666454] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 924.666748] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494/73d5b248-3c3e-4e38-8d9c-1f9bfdb38494.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 924.667037] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-217acf95-3806-4965-b33e-81f822df26ef {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.674652] env[69328]: DEBUG oslo_vmware.api [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for the task: (returnval){ [ 924.674652] env[69328]: value = "task-3273557" [ 924.674652] env[69328]: _type = "Task" [ 924.674652] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.683571] env[69328]: DEBUG oslo_vmware.api [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273557, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.728064] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf2baf77-a393-4eed-b1ad-b7b980b675f1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.736214] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-415180f1-0807-4a70-8e19-caa3a88a870f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.770450] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfb0a03a-3759-4cb9-af8a-2d04b4ca5f7c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.779207] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b36faecd-d278-4c5d-9479-acf3fd8b1a02 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.799764] env[69328]: DEBUG nova.compute.provider_tree [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 924.902919] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0790747f-001b-4009-889e-af44a3ab0781 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 924.954893] env[69328]: DEBUG nova.objects.base [None req-b6e80060-f62c-47bc-822b-0bdeae508a3d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=69328) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 924.955955] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5b86711-ecaa-4cee-8e26-b65110359826 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.979947] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4d864e7-1812-47f8-89ce-725cb7335d3a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.986951] env[69328]: DEBUG oslo_vmware.api [None req-b6e80060-f62c-47bc-822b-0bdeae508a3d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 924.986951] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52fb8d98-7795-f4d2-96b2-9e629876934c" [ 924.986951] env[69328]: _type = "Task" [ 924.986951] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.996688] env[69328]: DEBUG oslo_vmware.api [None req-b6e80060-f62c-47bc-822b-0bdeae508a3d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52fb8d98-7795-f4d2-96b2-9e629876934c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.999116] env[69328]: DEBUG nova.compute.manager [req-6e4b03d3-66fe-48f0-a8a6-6e072e27abbb req-95574a41-1d75-4f31-a2b2-cc239d4d9353 service nova] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Received event network-vif-deleted-e4e1721b-99c8-403d-b056-e89c1a106a41 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 925.101494] env[69328]: DEBUG nova.network.neutron [req-c2be8b26-498f-4798-afae-2604c93bf72b req-c386e3ea-3c15-440b-93cb-2307c329b538 service nova] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 925.159128] env[69328]: DEBUG oslo_vmware.api [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273556, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.186909] env[69328]: DEBUG oslo_vmware.api [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273557, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.49617} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.187203] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494/73d5b248-3c3e-4e38-8d9c-1f9bfdb38494.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 925.187637] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 925.187962] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2f2f74c2-b582-4fdd-87e0-7a9e97a3f38f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.197321] env[69328]: DEBUG oslo_vmware.api [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for the task: (returnval){ [ 925.197321] env[69328]: value = "task-3273558" [ 925.197321] env[69328]: _type = "Task" [ 925.197321] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.206495] env[69328]: DEBUG oslo_vmware.api [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273558, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.241426] env[69328]: DEBUG nova.network.neutron [req-c2be8b26-498f-4798-afae-2604c93bf72b req-c386e3ea-3c15-440b-93cb-2307c329b538 service nova] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.302878] env[69328]: DEBUG nova.scheduler.client.report [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 925.501785] env[69328]: DEBUG oslo_vmware.api [None req-b6e80060-f62c-47bc-822b-0bdeae508a3d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52fb8d98-7795-f4d2-96b2-9e629876934c, 'name': SearchDatastore_Task, 'duration_secs': 0.031786} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.501785] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b6e80060-f62c-47bc-822b-0bdeae508a3d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 925.661719] env[69328]: DEBUG oslo_vmware.api [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273556, 'name': ReconfigVM_Task, 'duration_secs': 0.550404} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.661719] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Reconfigured VM instance instance-00000049 to attach disk [datastore1] 1413dcfe-3570-4657-b811-81a1acc159d1/1413dcfe-3570-4657-b811-81a1acc159d1.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 925.662431] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5a941fca-46e7-4669-93e7-4f32a1eea225 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.671111] env[69328]: DEBUG oslo_vmware.api [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 925.671111] env[69328]: value = "task-3273559" [ 925.671111] env[69328]: _type = "Task" [ 925.671111] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.681140] env[69328]: DEBUG oslo_vmware.api [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273559, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.707805] env[69328]: DEBUG oslo_vmware.api [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273558, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.208134} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.708113] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 925.710580] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-635fabd1-3c58-4f89-a8c3-ca7f68c2eec5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.734840] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494/73d5b248-3c3e-4e38-8d9c-1f9bfdb38494.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 925.735326] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-253ba3cb-6015-4fe0-b881-84e15d59e4d7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.750843] env[69328]: DEBUG oslo_concurrency.lockutils [req-c2be8b26-498f-4798-afae-2604c93bf72b req-c386e3ea-3c15-440b-93cb-2307c329b538 service nova] Releasing lock "refresh_cache-1f568ba1-8591-499b-b1ee-da16e26f81fc" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 925.758156] env[69328]: DEBUG oslo_vmware.api [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for the task: (returnval){ [ 925.758156] env[69328]: value = "task-3273560" [ 925.758156] env[69328]: _type = "Task" [ 925.758156] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.767222] env[69328]: DEBUG oslo_vmware.api [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273560, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.808515] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.537s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 925.809322] env[69328]: DEBUG nova.compute.manager [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 925.812376] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.873s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 925.814191] env[69328]: INFO nova.compute.claims [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 926.182565] env[69328]: DEBUG oslo_vmware.api [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273559, 'name': Rename_Task, 'duration_secs': 0.23568} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.182952] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 926.183158] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8f09b8c5-fbc6-411a-8f50-197e6fa3f49b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.191680] env[69328]: DEBUG oslo_vmware.api [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 926.191680] env[69328]: value = "task-3273561" [ 926.191680] env[69328]: _type = "Task" [ 926.191680] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.200342] env[69328]: DEBUG oslo_vmware.api [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273561, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.268594] env[69328]: DEBUG oslo_vmware.api [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273560, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.318361] env[69328]: DEBUG nova.compute.utils [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 926.324038] env[69328]: DEBUG nova.compute.manager [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 926.324038] env[69328]: DEBUG nova.network.neutron [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 926.376189] env[69328]: DEBUG nova.policy [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aed0f81423aa4a24949ad1dc3cfdef2c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f50ac50ef6ae4abc83a8064746de7029', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 926.675524] env[69328]: DEBUG nova.compute.manager [req-53703c66-a701-483e-8bcd-420ba765cf23 req-497efc5e-5325-45ac-930a-6dbb65d33dcd service nova] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Received event network-vif-plugged-958e374d-7706-40d9-aac0-ee00bd3140f0 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 926.675741] env[69328]: DEBUG oslo_concurrency.lockutils [req-53703c66-a701-483e-8bcd-420ba765cf23 req-497efc5e-5325-45ac-930a-6dbb65d33dcd service nova] Acquiring lock "1f568ba1-8591-499b-b1ee-da16e26f81fc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 926.676015] env[69328]: DEBUG oslo_concurrency.lockutils [req-53703c66-a701-483e-8bcd-420ba765cf23 req-497efc5e-5325-45ac-930a-6dbb65d33dcd service nova] Lock "1f568ba1-8591-499b-b1ee-da16e26f81fc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 926.676134] env[69328]: DEBUG oslo_concurrency.lockutils [req-53703c66-a701-483e-8bcd-420ba765cf23 req-497efc5e-5325-45ac-930a-6dbb65d33dcd service nova] Lock "1f568ba1-8591-499b-b1ee-da16e26f81fc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 926.676303] env[69328]: DEBUG nova.compute.manager 
[req-53703c66-a701-483e-8bcd-420ba765cf23 req-497efc5e-5325-45ac-930a-6dbb65d33dcd service nova] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] No waiting events found dispatching network-vif-plugged-958e374d-7706-40d9-aac0-ee00bd3140f0 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 926.676474] env[69328]: WARNING nova.compute.manager [req-53703c66-a701-483e-8bcd-420ba765cf23 req-497efc5e-5325-45ac-930a-6dbb65d33dcd service nova] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Received unexpected event network-vif-plugged-958e374d-7706-40d9-aac0-ee00bd3140f0 for instance with vm_state building and task_state spawning. [ 926.704971] env[69328]: DEBUG oslo_vmware.api [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273561, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.771945] env[69328]: DEBUG nova.network.neutron [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Successfully updated port: 958e374d-7706-40d9-aac0-ee00bd3140f0 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 926.773241] env[69328]: DEBUG oslo_vmware.api [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273560, 'name': ReconfigVM_Task, 'duration_secs': 0.753123} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.773507] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Reconfigured VM instance instance-0000004a to attach disk [datastore2] 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494/73d5b248-3c3e-4e38-8d9c-1f9bfdb38494.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 926.774581] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0e89c717-b768-43e4-bfed-6b36deff5700 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.782652] env[69328]: DEBUG oslo_vmware.api [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for the task: (returnval){ [ 926.782652] env[69328]: value = "task-3273562" [ 926.782652] env[69328]: _type = "Task" [ 926.782652] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.792790] env[69328]: DEBUG oslo_vmware.api [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273562, 'name': Rename_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.808186] env[69328]: DEBUG nova.network.neutron [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Successfully created port: a3cab44b-0572-4007-bab9-e84ba084f70a {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 926.824259] env[69328]: DEBUG nova.compute.manager [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 927.204193] env[69328]: DEBUG oslo_vmware.api [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273561, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.206587] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acf3cf06-13f8-4c16-b0e8-5b3facc1e9ac {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.215281] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36a1899d-8bd9-4f20-ad25-dd43e565b8c8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.248892] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84af7630-91b9-409d-8f28-d7862ac2144d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.258140] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9a51bb9-0950-4b47-89a3-fa27d72ac252 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.274243] env[69328]: DEBUG nova.compute.provider_tree [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 927.276021] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Acquiring lock "refresh_cache-1f568ba1-8591-499b-b1ee-da16e26f81fc" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.276194] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Acquired lock "refresh_cache-1f568ba1-8591-499b-b1ee-da16e26f81fc" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 927.276299] env[69328]: DEBUG nova.network.neutron [None 
req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 927.297268] env[69328]: DEBUG oslo_vmware.api [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273562, 'name': Rename_Task, 'duration_secs': 0.258463} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.297614] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 927.297915] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8e8c5a98-0c3d-483b-8572-0d1d4b0c51e1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.306341] env[69328]: DEBUG oslo_vmware.api [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for the task: (returnval){ [ 927.306341] env[69328]: value = "task-3273563" [ 927.306341] env[69328]: _type = "Task" [ 927.306341] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.315235] env[69328]: DEBUG oslo_vmware.api [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273563, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.706226] env[69328]: DEBUG oslo_vmware.api [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273561, 'name': PowerOnVM_Task, 'duration_secs': 1.098966} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.706666] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 927.707033] env[69328]: INFO nova.compute.manager [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Took 9.40 seconds to spawn the instance on the hypervisor. 
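The PowerOnVM_Task records above (wait_for_task at oslo_vmware/api.py:397, _poll_task at api.py:434/444) come from a loop that repeatedly fetches the vCenter task state and reports progress until the task succeeds or fails. A minimal sketch of that polling pattern follows; the helper name wait_for_task_sketch, the poll_task_info callable and the TaskInfo shape are illustrative assumptions, not the oslo.vmware API itself.

    import time

    class TaskFailed(RuntimeError):
        """Raised when the polled task ends in the 'error' state."""

    def wait_for_task_sketch(poll_task_info, interval=0.5):
        # poll_task_info() is assumed to return an object mirroring a vSphere
        # TaskInfo: .state in ('queued', 'running', 'success', 'error'),
        # .progress (int or None) and .error.
        while True:
            info = poll_task_info()
            if info.state == 'success':
                return info
            if info.state == 'error':
                raise TaskFailed(str(info.error))
            # Between polls the real code logs lines of the shape seen above:
            # "Task: {'id': task-3273561, 'name': PowerOnVM_Task} progress is 66%."
            print("progress is %s%%" % (info.progress or 0))
            time.sleep(interval)

A caller would pass a closure that retrieves the TaskInfo for one specific task reference and then block on wait_for_task_sketch until it returns or raises.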
[ 927.707318] env[69328]: DEBUG nova.compute.manager [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 927.708164] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deb20a87-e59d-4438-9e35-33a7a53a49e3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.779200] env[69328]: DEBUG nova.scheduler.client.report [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 927.817501] env[69328]: DEBUG oslo_vmware.api [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273563, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.818524] env[69328]: DEBUG nova.network.neutron [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 927.837559] env[69328]: DEBUG nova.compute.manager [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 927.863749] env[69328]: DEBUG nova.virt.hardware [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 927.864015] env[69328]: DEBUG nova.virt.hardware [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 927.864187] env[69328]: DEBUG nova.virt.hardware [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 927.864372] env[69328]: DEBUG nova.virt.hardware [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 927.864521] env[69328]: DEBUG nova.virt.hardware [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 927.864669] env[69328]: DEBUG nova.virt.hardware [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 927.864879] env[69328]: DEBUG nova.virt.hardware [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 927.865062] env[69328]: DEBUG nova.virt.hardware [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 927.865244] env[69328]: DEBUG nova.virt.hardware [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 927.865408] env[69328]: DEBUG nova.virt.hardware [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 927.865578] env[69328]: DEBUG nova.virt.hardware [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 927.866762] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2018dc0-02fc-4944-a5f4-70a65d695e1b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.875356] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05cb7135-3a59-4d78-b938-84276ec623bd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.234058] env[69328]: DEBUG nova.network.neutron [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Updating instance_info_cache with network_info: [{"id": "43596910-7f9b-47c0-972f-1dd3d779373c", "address": "fa:16:3e:91:de:bb", "network": {"id": "55bad467-2007-4b97-adcb-e617016063f5", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1014543481", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.34", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f71a072b33154efe9636b50e25f93381", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43596910-7f", "ovs_interfaceid": "43596910-7f9b-47c0-972f-1dd3d779373c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "958e374d-7706-40d9-aac0-ee00bd3140f0", "address": "fa:16:3e:27:42:2c", "network": {"id": "503b721a-0340-4a37-a93c-2106abfa16b5", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-893160243", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.189", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": 
[], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "f71a072b33154efe9636b50e25f93381", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bf86b133-2b7b-4cab-8f6f-5a0856d34c7b", "external-id": "nsx-vlan-transportzone-557", "segmentation_id": 557, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap958e374d-77", "ovs_interfaceid": "958e374d-7706-40d9-aac0-ee00bd3140f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.235161] env[69328]: INFO nova.compute.manager [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Took 49.07 seconds to build instance. [ 928.248270] env[69328]: DEBUG nova.compute.manager [req-9ae84002-16ac-4039-9319-07931fb0b990 req-1876ca6f-625e-45c5-8a36-86f7e033c937 service nova] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Received event network-vif-plugged-a3cab44b-0572-4007-bab9-e84ba084f70a {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 928.248779] env[69328]: DEBUG oslo_concurrency.lockutils [req-9ae84002-16ac-4039-9319-07931fb0b990 req-1876ca6f-625e-45c5-8a36-86f7e033c937 service nova] Acquiring lock "76210566-12d7-4f6a-afa1-6329e87e0f85-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 928.249179] env[69328]: DEBUG oslo_concurrency.lockutils [req-9ae84002-16ac-4039-9319-07931fb0b990 req-1876ca6f-625e-45c5-8a36-86f7e033c937 service nova] Lock "76210566-12d7-4f6a-afa1-6329e87e0f85-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 928.249179] env[69328]: DEBUG oslo_concurrency.lockutils [req-9ae84002-16ac-4039-9319-07931fb0b990 req-1876ca6f-625e-45c5-8a36-86f7e033c937 service nova] Lock "76210566-12d7-4f6a-afa1-6329e87e0f85-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 928.249352] env[69328]: DEBUG nova.compute.manager [req-9ae84002-16ac-4039-9319-07931fb0b990 req-1876ca6f-625e-45c5-8a36-86f7e033c937 service nova] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] No waiting events found dispatching network-vif-plugged-a3cab44b-0572-4007-bab9-e84ba084f70a {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 928.249847] env[69328]: WARNING nova.compute.manager [req-9ae84002-16ac-4039-9319-07931fb0b990 req-1876ca6f-625e-45c5-8a36-86f7e033c937 service nova] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Received unexpected event network-vif-plugged-a3cab44b-0572-4007-bab9-e84ba084f70a for instance with vm_state building and task_state spawning. 
[ 928.286542] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.474s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 928.287421] env[69328]: DEBUG nova.compute.manager [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 928.292308] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.585s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 928.298889] env[69328]: INFO nova.compute.claims [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 928.318544] env[69328]: DEBUG oslo_vmware.api [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273563, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.365392] env[69328]: DEBUG nova.network.neutron [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Successfully updated port: a3cab44b-0572-4007-bab9-e84ba084f70a {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 928.521935] env[69328]: INFO nova.compute.manager [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Rescuing [ 928.522221] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquiring lock "refresh_cache-1413dcfe-3570-4657-b811-81a1acc159d1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.522374] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquired lock "refresh_cache-1413dcfe-3570-4657-b811-81a1acc159d1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 928.522556] env[69328]: DEBUG nova.network.neutron [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 928.736140] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Releasing lock "refresh_cache-1f568ba1-8591-499b-b1ee-da16e26f81fc" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 928.736140] env[69328]: DEBUG nova.compute.manager [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Instance network_info: |[{"id": "43596910-7f9b-47c0-972f-1dd3d779373c", "address": "fa:16:3e:91:de:bb", "network": {"id": "55bad467-2007-4b97-adcb-e617016063f5", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1014543481", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.34", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f71a072b33154efe9636b50e25f93381", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43596910-7f", "ovs_interfaceid": 
"43596910-7f9b-47c0-972f-1dd3d779373c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "958e374d-7706-40d9-aac0-ee00bd3140f0", "address": "fa:16:3e:27:42:2c", "network": {"id": "503b721a-0340-4a37-a93c-2106abfa16b5", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-893160243", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.189", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "f71a072b33154efe9636b50e25f93381", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bf86b133-2b7b-4cab-8f6f-5a0856d34c7b", "external-id": "nsx-vlan-transportzone-557", "segmentation_id": 557, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap958e374d-77", "ovs_interfaceid": "958e374d-7706-40d9-aac0-ee00bd3140f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 928.736732] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:91:de:bb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b5a629f-6902-4d30-9278-74b443a8371d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '43596910-7f9b-47c0-972f-1dd3d779373c', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:27:42:2c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bf86b133-2b7b-4cab-8f6f-5a0856d34c7b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '958e374d-7706-40d9-aac0-ee00bd3140f0', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 928.746719] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 928.747114] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c0ec662a-dab8-473b-a866-90db07c5f25c tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lock "1413dcfe-3570-4657-b811-81a1acc159d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.592s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 928.747341] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 928.747616] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-39cae397-d8a6-4c73-a8c4-fbe270d62799 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.778248] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 928.778248] env[69328]: value = "task-3273564" [ 928.778248] env[69328]: _type = "Task" [ 928.778248] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.786377] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273564, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.807274] env[69328]: DEBUG nova.compute.utils [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 928.811812] env[69328]: DEBUG nova.compute.manager [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 928.812180] env[69328]: DEBUG nova.network.neutron [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 928.826443] env[69328]: DEBUG oslo_vmware.api [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273563, 'name': PowerOnVM_Task, 'duration_secs': 1.036285} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.826724] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 928.826973] env[69328]: INFO nova.compute.manager [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Took 7.98 seconds to spawn the instance on the hypervisor. [ 928.827529] env[69328]: DEBUG nova.compute.manager [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 928.828983] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c2f4502-f27a-40fa-8c2c-f4bd9b02d306 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.858541] env[69328]: DEBUG nova.policy [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '19265c910cd04814978013416bf2a18a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '636412f89c9d488a9cfd6f19ef046efc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 928.870877] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquiring lock "refresh_cache-76210566-12d7-4f6a-afa1-6329e87e0f85" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.871036] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquired lock "refresh_cache-76210566-12d7-4f6a-afa1-6329e87e0f85" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 928.871137] env[69328]: DEBUG nova.network.neutron [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 929.034591] env[69328]: DEBUG nova.compute.manager [req-b4058648-e23a-473b-8317-4ff06857a3da req-83b20a24-ae80-4f6d-a131-4cde7dc775f1 service nova] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Received event 
network-changed-958e374d-7706-40d9-aac0-ee00bd3140f0 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 929.034804] env[69328]: DEBUG nova.compute.manager [req-b4058648-e23a-473b-8317-4ff06857a3da req-83b20a24-ae80-4f6d-a131-4cde7dc775f1 service nova] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Refreshing instance network info cache due to event network-changed-958e374d-7706-40d9-aac0-ee00bd3140f0. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 929.035034] env[69328]: DEBUG oslo_concurrency.lockutils [req-b4058648-e23a-473b-8317-4ff06857a3da req-83b20a24-ae80-4f6d-a131-4cde7dc775f1 service nova] Acquiring lock "refresh_cache-1f568ba1-8591-499b-b1ee-da16e26f81fc" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.035185] env[69328]: DEBUG oslo_concurrency.lockutils [req-b4058648-e23a-473b-8317-4ff06857a3da req-83b20a24-ae80-4f6d-a131-4cde7dc775f1 service nova] Acquired lock "refresh_cache-1f568ba1-8591-499b-b1ee-da16e26f81fc" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 929.035348] env[69328]: DEBUG nova.network.neutron [req-b4058648-e23a-473b-8317-4ff06857a3da req-83b20a24-ae80-4f6d-a131-4cde7dc775f1 service nova] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Refreshing network info cache for port 958e374d-7706-40d9-aac0-ee00bd3140f0 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 929.125221] env[69328]: DEBUG nova.network.neutron [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Successfully created port: 3a99454a-a8d4-4939-8e61-b21d121522f2 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 929.235545] env[69328]: DEBUG nova.network.neutron [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Updating instance_info_cache with network_info: [{"id": "7843ca64-fb43-4866-9bd7-f10b7c7e085e", "address": "fa:16:3e:30:b0:de", "network": {"id": "bd41ac10-1850-45a2-8e46-6ebdca3d8e13", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-766393176-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efd0e2d2f9ba4416bd8fd08dad912465", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7843ca64-fb", "ovs_interfaceid": "7843ca64-fb43-4866-9bd7-f10b7c7e085e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.291480] env[69328]: DEBUG 
oslo_vmware.api [-] Task: {'id': task-3273564, 'name': CreateVM_Task, 'duration_secs': 0.421489} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.291480] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 929.291480] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.291480] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 929.291480] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 929.291480] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fbcb7aa8-920c-4eed-8997-08c64a4cbd65 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.300019] env[69328]: DEBUG oslo_vmware.api [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Waiting for the task: (returnval){ [ 929.300019] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b3848a-ead2-a9a8-cd61-b050e40d71d2" [ 929.300019] env[69328]: _type = "Task" [ 929.300019] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.309053] env[69328]: DEBUG oslo_vmware.api [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b3848a-ead2-a9a8-cd61-b050e40d71d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.312247] env[69328]: DEBUG nova.compute.manager [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 929.347243] env[69328]: INFO nova.compute.manager [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Took 44.56 seconds to build instance. 
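
Between task-3273564 being issued via Folder.CreateVM_Task and the "Created VM on the ESX host" line above sits oslo.vmware's task-polling loop, which produces the "Waiting for the task" and "progress is 0%" entries. A rough sketch of that invoke-then-wait pattern follows, assuming a session built from placeholder credentials and pre-resolved managed-object references (folder_ref, config_spec, res_pool_ref are hypothetical inputs); the real driver code in nova/virt/vmwareapi/vm_util.py wraps this with considerably more bookkeeping.

    from oslo_vmware import api as vmware_api

    def create_vm(session, folder_ref, config_spec, res_pool_ref):
        # Issue the SOAP call; this is what appears in the log as
        # "Invoking Folder.CreateVM_Task with opID=...".
        task = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                                  config=config_spec, pool=res_pool_ref)
        # wait_for_task() polls the task (the "progress is 0%" lines) and
        # returns the task info on success; .result is the new VM reference.
        return session.wait_for_task(task).result

    # Placeholder session; the host and credentials here are illustrative only,
    # and constructing the session connects to vCenter immediately.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)
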
[ 929.403364] env[69328]: DEBUG nova.network.neutron [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 929.533743] env[69328]: DEBUG nova.network.neutron [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Updating instance_info_cache with network_info: [{"id": "a3cab44b-0572-4007-bab9-e84ba084f70a", "address": "fa:16:3e:02:7d:25", "network": {"id": "3be6b159-5d5c-4752-b79d-0ed6110569d0", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-915151552-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f50ac50ef6ae4abc83a8064746de7029", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3cab44b-05", "ovs_interfaceid": "a3cab44b-0572-4007-bab9-e84ba084f70a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.627680] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d97a2d0e-dc0c-43ee-b311-04203bf22450 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.637207] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-978e32c4-201e-43a6-8f13-9a713fcb02ae {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.672605] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c50cbdff-cede-48c9-ae44-335381ccc67e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.681974] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17e1809b-b660-4688-939c-e260c85a1064 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.696315] env[69328]: DEBUG nova.compute.provider_tree [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 929.741173] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6c230130-bc6e-4335-a78d-25907c763e95 
tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Releasing lock "refresh_cache-1413dcfe-3570-4657-b811-81a1acc159d1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 929.815945] env[69328]: DEBUG oslo_vmware.api [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b3848a-ead2-a9a8-cd61-b050e40d71d2, 'name': SearchDatastore_Task, 'duration_secs': 0.014519} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.815945] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 929.815945] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 929.815945] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.815945] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 929.815945] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 929.819250] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e4c5b324-dc4b-4722-a629-9a9be8e25b77 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.833682] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 929.833682] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Folder 
[datastore1] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 929.833682] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b2328cd-9af9-4446-985d-0193831e67eb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.837364] env[69328]: DEBUG oslo_vmware.api [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Waiting for the task: (returnval){ [ 929.837364] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c8b3dc-f819-6064-b2e0-dd77186208c0" [ 929.837364] env[69328]: _type = "Task" [ 929.837364] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.841695] env[69328]: DEBUG nova.network.neutron [req-b4058648-e23a-473b-8317-4ff06857a3da req-83b20a24-ae80-4f6d-a131-4cde7dc775f1 service nova] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Updated VIF entry in instance network info cache for port 958e374d-7706-40d9-aac0-ee00bd3140f0. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 929.843084] env[69328]: DEBUG nova.network.neutron [req-b4058648-e23a-473b-8317-4ff06857a3da req-83b20a24-ae80-4f6d-a131-4cde7dc775f1 service nova] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Updating instance_info_cache with network_info: [{"id": "43596910-7f9b-47c0-972f-1dd3d779373c", "address": "fa:16:3e:91:de:bb", "network": {"id": "55bad467-2007-4b97-adcb-e617016063f5", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1014543481", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.34", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f71a072b33154efe9636b50e25f93381", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43596910-7f", "ovs_interfaceid": "43596910-7f9b-47c0-972f-1dd3d779373c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "958e374d-7706-40d9-aac0-ee00bd3140f0", "address": "fa:16:3e:27:42:2c", "network": {"id": "503b721a-0340-4a37-a93c-2106abfa16b5", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-893160243", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.189", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "f71a072b33154efe9636b50e25f93381", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"bf86b133-2b7b-4cab-8f6f-5a0856d34c7b", "external-id": "nsx-vlan-transportzone-557", "segmentation_id": 557, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap958e374d-77", "ovs_interfaceid": "958e374d-7706-40d9-aac0-ee00bd3140f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.852790] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ac1de219-62fe-4b90-bb91-75ca6e09b889 tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Lock "73d5b248-3c3e-4e38-8d9c-1f9bfdb38494" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.079s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 929.853153] env[69328]: DEBUG oslo_vmware.api [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c8b3dc-f819-6064-b2e0-dd77186208c0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.036909] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Releasing lock "refresh_cache-76210566-12d7-4f6a-afa1-6329e87e0f85" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 930.037275] env[69328]: DEBUG nova.compute.manager [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Instance network_info: |[{"id": "a3cab44b-0572-4007-bab9-e84ba084f70a", "address": "fa:16:3e:02:7d:25", "network": {"id": "3be6b159-5d5c-4752-b79d-0ed6110569d0", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-915151552-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f50ac50ef6ae4abc83a8064746de7029", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3cab44b-05", "ovs_interfaceid": "a3cab44b-0572-4007-bab9-e84ba084f70a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 930.037686] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 
tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:02:7d:25', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fe20ef0e-0991-44d7-887d-08dddac0b56b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a3cab44b-0572-4007-bab9-e84ba084f70a', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 930.046067] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Creating folder: Project (f50ac50ef6ae4abc83a8064746de7029). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 930.046359] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f4d48d01-79f5-4347-9a27-e9ba01ae4996 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.058842] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Created folder: Project (f50ac50ef6ae4abc83a8064746de7029) in parent group-v653649. [ 930.059171] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Creating folder: Instances. Parent ref: group-v653862. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 930.059380] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-22bbe4de-3e09-4e85-9f13-4e47bcdf2113 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.070470] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Created folder: Instances in parent group-v653862. [ 930.070707] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 930.071041] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 930.071135] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9027a94b-44d6-4a50-995d-d4555a65db61 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.096146] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 930.096146] env[69328]: value = "task-3273567" [ 930.096146] env[69328]: _type = "Task" [ 930.096146] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.105022] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273567, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.200406] env[69328]: DEBUG nova.scheduler.client.report [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 930.279424] env[69328]: DEBUG nova.compute.manager [req-14339df2-5b26-4eb3-a50e-c2ae4ea7fa15 req-2f6a9cf4-153e-45b3-8fa6-bf5958699d18 service nova] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Received event network-changed-a3cab44b-0572-4007-bab9-e84ba084f70a {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 930.279424] env[69328]: DEBUG nova.compute.manager [req-14339df2-5b26-4eb3-a50e-c2ae4ea7fa15 req-2f6a9cf4-153e-45b3-8fa6-bf5958699d18 service nova] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Refreshing instance network info cache due to event network-changed-a3cab44b-0572-4007-bab9-e84ba084f70a. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 930.279743] env[69328]: DEBUG oslo_concurrency.lockutils [req-14339df2-5b26-4eb3-a50e-c2ae4ea7fa15 req-2f6a9cf4-153e-45b3-8fa6-bf5958699d18 service nova] Acquiring lock "refresh_cache-76210566-12d7-4f6a-afa1-6329e87e0f85" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.279743] env[69328]: DEBUG oslo_concurrency.lockutils [req-14339df2-5b26-4eb3-a50e-c2ae4ea7fa15 req-2f6a9cf4-153e-45b3-8fa6-bf5958699d18 service nova] Acquired lock "refresh_cache-76210566-12d7-4f6a-afa1-6329e87e0f85" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 930.279953] env[69328]: DEBUG nova.network.neutron [req-14339df2-5b26-4eb3-a50e-c2ae4ea7fa15 req-2f6a9cf4-153e-45b3-8fa6-bf5958699d18 service nova] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Refreshing network info cache for port a3cab44b-0572-4007-bab9-e84ba084f70a {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 930.323648] env[69328]: DEBUG nova.compute.manager [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 930.347188] env[69328]: DEBUG oslo_concurrency.lockutils [req-b4058648-e23a-473b-8317-4ff06857a3da req-83b20a24-ae80-4f6d-a131-4cde7dc775f1 service nova] Releasing lock "refresh_cache-1f568ba1-8591-499b-b1ee-da16e26f81fc" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 930.357235] env[69328]: DEBUG oslo_vmware.api [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c8b3dc-f819-6064-b2e0-dd77186208c0, 'name': SearchDatastore_Task, 'duration_secs': 0.010837} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.361057] env[69328]: DEBUG nova.virt.hardware [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 930.361418] env[69328]: DEBUG nova.virt.hardware [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 930.361671] env[69328]: DEBUG nova.virt.hardware [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 930.362141] env[69328]: DEBUG nova.virt.hardware [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 930.362417] env[69328]: DEBUG nova.virt.hardware [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 930.362685] env[69328]: DEBUG nova.virt.hardware [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 930.363049] env[69328]: DEBUG nova.virt.hardware [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 
tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 930.363348] env[69328]: DEBUG nova.virt.hardware [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 930.363660] env[69328]: DEBUG nova.virt.hardware [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 930.363981] env[69328]: DEBUG nova.virt.hardware [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 930.364249] env[69328]: DEBUG nova.virt.hardware [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 930.369024] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f17904c-094c-4c7e-a875-8dd7d92badf4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.369511] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b8bf28e-b3c0-49aa-baa3-6ac96781a1d4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.378734] env[69328]: DEBUG oslo_vmware.api [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Waiting for the task: (returnval){ [ 930.378734] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52058816-2997-b029-bda5-5990fcec56a1" [ 930.378734] env[69328]: _type = "Task" [ 930.378734] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.386901] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0688089-bc1a-4509-8361-59d9a1328eab {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.397457] env[69328]: DEBUG oslo_vmware.api [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52058816-2997-b029-bda5-5990fcec56a1, 'name': SearchDatastore_Task, 'duration_secs': 0.01158} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.405681] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 930.406095] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 1f568ba1-8591-499b-b1ee-da16e26f81fc/1f568ba1-8591-499b-b1ee-da16e26f81fc.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 930.406611] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eb03a139-1048-4c12-b009-85c8ea8d223b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.416085] env[69328]: DEBUG oslo_vmware.api [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Waiting for the task: (returnval){ [ 930.416085] env[69328]: value = "task-3273568" [ 930.416085] env[69328]: _type = "Task" [ 930.416085] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.426908] env[69328]: DEBUG oslo_vmware.api [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': task-3273568, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.446214] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7b54d2e5-71f6-4aae-aebe-2a4213df214c tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Acquiring lock "73d5b248-3c3e-4e38-8d9c-1f9bfdb38494" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 930.446628] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7b54d2e5-71f6-4aae-aebe-2a4213df214c tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Lock "73d5b248-3c3e-4e38-8d9c-1f9bfdb38494" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 930.446930] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7b54d2e5-71f6-4aae-aebe-2a4213df214c tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Acquiring lock "73d5b248-3c3e-4e38-8d9c-1f9bfdb38494-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 930.447219] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7b54d2e5-71f6-4aae-aebe-2a4213df214c tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Lock "73d5b248-3c3e-4e38-8d9c-1f9bfdb38494-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 930.447554] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7b54d2e5-71f6-4aae-aebe-2a4213df214c tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Lock "73d5b248-3c3e-4e38-8d9c-1f9bfdb38494-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 930.449905] env[69328]: INFO nova.compute.manager [None req-7b54d2e5-71f6-4aae-aebe-2a4213df214c tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Terminating instance [ 930.611415] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273567, 'name': CreateVM_Task, 'duration_secs': 0.352347} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.611415] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 930.611761] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.612056] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 930.612456] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 930.612753] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2e6daa5-93f0-4899-a8d9-e4e26b8bc45c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.620720] env[69328]: DEBUG oslo_vmware.api [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 930.620720] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]521fda69-074e-d3ad-53ea-4e472c7a122f" [ 930.620720] env[69328]: _type = "Task" [ 930.620720] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.633215] env[69328]: DEBUG oslo_vmware.api [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]521fda69-074e-d3ad-53ea-4e472c7a122f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.691358] env[69328]: DEBUG nova.network.neutron [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Successfully updated port: 3a99454a-a8d4-4939-8e61-b21d121522f2 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 930.705974] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.414s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 930.706676] env[69328]: DEBUG nova.compute.manager [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 930.710412] env[69328]: DEBUG oslo_concurrency.lockutils [None req-27776706-5241-4665-a52a-b96bfa6a670b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.251s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 930.710772] env[69328]: DEBUG nova.objects.instance [None req-27776706-5241-4665-a52a-b96bfa6a670b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lazy-loading 'resources' on Instance uuid d017d08e-5f9e-4d05-8914-3320d4c87c9b {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 930.929370] env[69328]: DEBUG oslo_vmware.api [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': task-3273568, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.502607} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.929370] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 1f568ba1-8591-499b-b1ee-da16e26f81fc/1f568ba1-8591-499b-b1ee-da16e26f81fc.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 930.929594] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 930.930132] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-baa42dba-96de-4932-a19c-f2b12d926444 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.938448] env[69328]: DEBUG oslo_vmware.api [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Waiting for the task: (returnval){ [ 930.938448] env[69328]: value = "task-3273569" [ 930.938448] env[69328]: _type = "Task" [ 930.938448] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.948192] env[69328]: DEBUG oslo_vmware.api [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': task-3273569, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.954197] env[69328]: DEBUG nova.compute.manager [None req-7b54d2e5-71f6-4aae-aebe-2a4213df214c tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 930.954615] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7b54d2e5-71f6-4aae-aebe-2a4213df214c tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 930.955468] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa35c367-6441-4a46-9f7a-a99fe80e45f0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.964014] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b54d2e5-71f6-4aae-aebe-2a4213df214c tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 930.966730] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3f9714d3-3d6d-4a30-95ba-bb80afa6f174 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.974282] env[69328]: DEBUG oslo_vmware.api [None req-7b54d2e5-71f6-4aae-aebe-2a4213df214c tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for the task: (returnval){ [ 930.974282] env[69328]: value = "task-3273570" [ 930.974282] env[69328]: _type = "Task" [ 930.974282] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.983860] env[69328]: DEBUG oslo_vmware.api [None req-7b54d2e5-71f6-4aae-aebe-2a4213df214c tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273570, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.039531] env[69328]: DEBUG nova.network.neutron [req-14339df2-5b26-4eb3-a50e-c2ae4ea7fa15 req-2f6a9cf4-153e-45b3-8fa6-bf5958699d18 service nova] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Updated VIF entry in instance network info cache for port a3cab44b-0572-4007-bab9-e84ba084f70a. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 931.039621] env[69328]: DEBUG nova.network.neutron [req-14339df2-5b26-4eb3-a50e-c2ae4ea7fa15 req-2f6a9cf4-153e-45b3-8fa6-bf5958699d18 service nova] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Updating instance_info_cache with network_info: [{"id": "a3cab44b-0572-4007-bab9-e84ba084f70a", "address": "fa:16:3e:02:7d:25", "network": {"id": "3be6b159-5d5c-4752-b79d-0ed6110569d0", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-915151552-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f50ac50ef6ae4abc83a8064746de7029", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3cab44b-05", "ovs_interfaceid": "a3cab44b-0572-4007-bab9-e84ba084f70a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.134011] env[69328]: DEBUG oslo_vmware.api [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]521fda69-074e-d3ad-53ea-4e472c7a122f, 'name': SearchDatastore_Task, 'duration_secs': 0.059992} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.134296] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 931.134611] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 931.135230] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.135230] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 931.135448] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 931.135742] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-214a14ce-6952-4cd1-bd87-b9fc98f8d875 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.147211] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 931.147418] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 931.148209] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-640d221b-5c9e-44cc-a781-0f413a5a88cc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.154124] env[69328]: DEBUG oslo_vmware.api [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 931.154124] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d60bd6-2f23-f961-b8ce-b97e1351f112" [ 931.154124] env[69328]: _type = "Task" [ 931.154124] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.163019] env[69328]: DEBUG oslo_vmware.api [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d60bd6-2f23-f961-b8ce-b97e1351f112, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.194520] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "refresh_cache-b21ff3c9-d53a-4065-a271-682c2f1b895d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.194683] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquired lock "refresh_cache-b21ff3c9-d53a-4065-a271-682c2f1b895d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 931.194910] env[69328]: DEBUG nova.network.neutron [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 931.212676] env[69328]: DEBUG nova.compute.utils [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 931.213913] env[69328]: DEBUG nova.compute.manager [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 931.214175] env[69328]: DEBUG nova.network.neutron [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 931.258973] env[69328]: DEBUG nova.policy [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '43be625728f24af5a2f6a650279d689d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cdc479a290524130b9d17e627a64b65a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 931.289997] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 931.290866] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-58bdeab5-03db-4e38-9e74-92640d8c8d20 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.298914] env[69328]: DEBUG oslo_vmware.api [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 931.298914] env[69328]: value = "task-3273571" [ 931.298914] env[69328]: _type = "Task" [ 931.298914] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.314036] env[69328]: DEBUG oslo_vmware.api [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273571, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.448912] env[69328]: DEBUG oslo_vmware.api [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': task-3273569, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.095232} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.450301] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 931.451650] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba26aafd-01ab-492f-b016-8af4b7b5a6a9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.482762] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] 1f568ba1-8591-499b-b1ee-da16e26f81fc/1f568ba1-8591-499b-b1ee-da16e26f81fc.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 931.485758] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aa8fba6a-66b7-4853-ae45-a99c6504c400 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.509431] env[69328]: DEBUG oslo_vmware.api [None req-7b54d2e5-71f6-4aae-aebe-2a4213df214c tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273570, 'name': PowerOffVM_Task, 'duration_secs': 0.215807} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.510752] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b54d2e5-71f6-4aae-aebe-2a4213df214c tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 931.510935] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7b54d2e5-71f6-4aae-aebe-2a4213df214c tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 931.511256] env[69328]: DEBUG oslo_vmware.api [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Waiting for the task: (returnval){ [ 931.511256] env[69328]: value = "task-3273572" [ 931.511256] env[69328]: _type = "Task" [ 931.511256] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.511453] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c8b32163-926e-4db8-b60e-2dc6c366fbf8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.525244] env[69328]: DEBUG oslo_vmware.api [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': task-3273572, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.544153] env[69328]: DEBUG oslo_concurrency.lockutils [req-14339df2-5b26-4eb3-a50e-c2ae4ea7fa15 req-2f6a9cf4-153e-45b3-8fa6-bf5958699d18 service nova] Releasing lock "refresh_cache-76210566-12d7-4f6a-afa1-6329e87e0f85" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 931.563840] env[69328]: DEBUG nova.network.neutron [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Successfully created port: c68f39c4-deae-4739-bc3f-9284775789c4 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 931.585096] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7b54d2e5-71f6-4aae-aebe-2a4213df214c tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 931.585383] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7b54d2e5-71f6-4aae-aebe-2a4213df214c tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 931.585570] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b54d2e5-71f6-4aae-aebe-2a4213df214c tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Deleting the datastore file [datastore2] 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 931.588411] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-67f9f35e-7168-4178-a185-eb7348095f11 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.596669] env[69328]: DEBUG oslo_vmware.api [None req-7b54d2e5-71f6-4aae-aebe-2a4213df214c tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for the task: (returnval){ [ 931.596669] env[69328]: value = "task-3273574" [ 931.596669] env[69328]: _type = "Task" [ 931.596669] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.606842] env[69328]: DEBUG oslo_vmware.api [None req-7b54d2e5-71f6-4aae-aebe-2a4213df214c tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273574, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.626186] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4874053-0c74-4b99-93b3-ae19d12cf815 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.634589] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9ff41d2-498c-41ff-bcc8-e2c7e3d614eb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.668791] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8d3eb09-e5e1-42f0-be10-8e65c52a443b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.681208] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3a09d83-0532-46cd-99aa-a946dd935b48 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.685375] env[69328]: DEBUG oslo_vmware.api [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d60bd6-2f23-f961-b8ce-b97e1351f112, 'name': SearchDatastore_Task, 'duration_secs': 0.013054} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.686849] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1c719f5-0092-4e58-8b3f-e3e4b160185a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.703260] env[69328]: DEBUG nova.compute.provider_tree [None req-27776706-5241-4665-a52a-b96bfa6a670b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 931.710535] env[69328]: DEBUG oslo_vmware.api [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 931.710535] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52cfe013-f099-5a7c-a068-ed6e6008dd93" [ 931.710535] env[69328]: _type = "Task" [ 931.710535] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.719111] env[69328]: DEBUG oslo_vmware.api [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52cfe013-f099-5a7c-a068-ed6e6008dd93, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.719678] env[69328]: DEBUG nova.compute.manager [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 931.737197] env[69328]: DEBUG nova.network.neutron [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 931.810643] env[69328]: DEBUG oslo_vmware.api [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273571, 'name': PowerOffVM_Task, 'duration_secs': 0.451339} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.810801] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 931.811866] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8f1db8a-27c9-4041-9dc1-e3ff38beb5e8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.831690] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dbc5111-6681-4816-aaae-f178b7da9159 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.866068] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 931.866240] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1470f2a6-d671-4d9e-8b51-2cd7c7569f3c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.874449] env[69328]: DEBUG oslo_vmware.api [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 931.874449] env[69328]: value = "task-3273575" [ 931.874449] 
env[69328]: _type = "Task" [ 931.874449] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.878429] env[69328]: DEBUG nova.network.neutron [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Updating instance_info_cache with network_info: [{"id": "3a99454a-a8d4-4939-8e61-b21d121522f2", "address": "fa:16:3e:68:17:25", "network": {"id": "5b6ce910-5dc1-49bb-ad4b-fe1a86509fc3", "bridge": "br-int", "label": "tempest-ImagesTestJSON-224847435-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "636412f89c9d488a9cfd6f19ef046efc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1e1e320-ec56-4fcc-b6e9-30aa210d3b36", "external-id": "nsx-vlan-transportzone-447", "segmentation_id": 447, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a99454a-a8", "ovs_interfaceid": "3a99454a-a8d4-4939-8e61-b21d121522f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.886599] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] VM already powered off {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 931.886599] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 931.886599] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.026019] env[69328]: DEBUG oslo_vmware.api [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': task-3273572, 'name': ReconfigVM_Task, 'duration_secs': 0.436951} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.026019] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Reconfigured VM instance instance-0000004b to attach disk [datastore1] 1f568ba1-8591-499b-b1ee-da16e26f81fc/1f568ba1-8591-499b-b1ee-da16e26f81fc.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 932.026019] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-90c0f0dd-28ed-41dc-93c5-58c957702cc1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.030819] env[69328]: DEBUG oslo_vmware.api [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Waiting for the task: (returnval){ [ 932.030819] env[69328]: value = "task-3273576" [ 932.030819] env[69328]: _type = "Task" [ 932.030819] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.039711] env[69328]: DEBUG oslo_vmware.api [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': task-3273576, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.106818] env[69328]: DEBUG oslo_vmware.api [None req-7b54d2e5-71f6-4aae-aebe-2a4213df214c tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Task: {'id': task-3273574, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.235468} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.107091] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b54d2e5-71f6-4aae-aebe-2a4213df214c tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 932.107278] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7b54d2e5-71f6-4aae-aebe-2a4213df214c tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 932.107453] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7b54d2e5-71f6-4aae-aebe-2a4213df214c tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 932.107624] env[69328]: INFO nova.compute.manager [None req-7b54d2e5-71f6-4aae-aebe-2a4213df214c tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Took 1.15 seconds to destroy the instance on the hypervisor. 
[ 932.107864] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7b54d2e5-71f6-4aae-aebe-2a4213df214c tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 932.108129] env[69328]: DEBUG nova.compute.manager [-] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 932.108283] env[69328]: DEBUG nova.network.neutron [-] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 932.209016] env[69328]: DEBUG nova.scheduler.client.report [None req-27776706-5241-4665-a52a-b96bfa6a670b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 932.223021] env[69328]: DEBUG oslo_vmware.api [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52cfe013-f099-5a7c-a068-ed6e6008dd93, 'name': SearchDatastore_Task, 'duration_secs': 0.034412} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.226707] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 932.227206] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 76210566-12d7-4f6a-afa1-6329e87e0f85/76210566-12d7-4f6a-afa1-6329e87e0f85.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 932.228354] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 932.228697] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 932.229067] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8c07d6eb-ae55-46b0-9c40-385787791afc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.231878] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cb45cb07-8166-4ee3-a401-742783c6ce45 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.243316] env[69328]: DEBUG oslo_vmware.api [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 932.243316] env[69328]: value = "task-3273577" [ 932.243316] env[69328]: _type = "Task" [ 932.243316] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.245613] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 932.246101] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 932.252653] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-224c657c-096e-4581-8ce9-d6237ced5074 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.261527] env[69328]: DEBUG oslo_vmware.api [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 932.261527] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ccea92-8ce9-3e41-1a9e-40d9154d7431" [ 932.261527] env[69328]: _type = "Task" [ 932.261527] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.263459] env[69328]: DEBUG oslo_vmware.api [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273577, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.278180] env[69328]: DEBUG oslo_vmware.api [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ccea92-8ce9-3e41-1a9e-40d9154d7431, 'name': SearchDatastore_Task, 'duration_secs': 0.012731} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.279398] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5eded634-8305-4b6b-bfd9-74a0d0e12fcf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.286500] env[69328]: DEBUG oslo_vmware.api [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 932.286500] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52723e69-e79f-1fda-2558-ff4871f5ccf3" [ 932.286500] env[69328]: _type = "Task" [ 932.286500] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.298033] env[69328]: DEBUG oslo_vmware.api [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52723e69-e79f-1fda-2558-ff4871f5ccf3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.309571] env[69328]: DEBUG nova.compute.manager [req-823b25dd-cd64-435a-9eb5-bc8db016c15b req-8d616780-6b5a-44ad-a875-1e7470a16656 service nova] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Received event network-vif-plugged-3a99454a-a8d4-4939-8e61-b21d121522f2 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 932.309732] env[69328]: DEBUG oslo_concurrency.lockutils [req-823b25dd-cd64-435a-9eb5-bc8db016c15b req-8d616780-6b5a-44ad-a875-1e7470a16656 service nova] Acquiring lock "b21ff3c9-d53a-4065-a271-682c2f1b895d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 932.310013] env[69328]: DEBUG oslo_concurrency.lockutils [req-823b25dd-cd64-435a-9eb5-bc8db016c15b req-8d616780-6b5a-44ad-a875-1e7470a16656 service nova] Lock "b21ff3c9-d53a-4065-a271-682c2f1b895d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 932.310630] env[69328]: DEBUG oslo_concurrency.lockutils [req-823b25dd-cd64-435a-9eb5-bc8db016c15b req-8d616780-6b5a-44ad-a875-1e7470a16656 service nova] Lock "b21ff3c9-d53a-4065-a271-682c2f1b895d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 932.310830] env[69328]: DEBUG nova.compute.manager [req-823b25dd-cd64-435a-9eb5-bc8db016c15b req-8d616780-6b5a-44ad-a875-1e7470a16656 service nova] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] No waiting events found dispatching network-vif-plugged-3a99454a-a8d4-4939-8e61-b21d121522f2 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 932.311043] env[69328]: WARNING nova.compute.manager [req-823b25dd-cd64-435a-9eb5-bc8db016c15b req-8d616780-6b5a-44ad-a875-1e7470a16656 service nova] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Received unexpected event network-vif-plugged-3a99454a-a8d4-4939-8e61-b21d121522f2 for instance with vm_state building and task_state spawning. [ 932.311216] env[69328]: DEBUG nova.compute.manager [req-823b25dd-cd64-435a-9eb5-bc8db016c15b req-8d616780-6b5a-44ad-a875-1e7470a16656 service nova] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Received event network-changed-3a99454a-a8d4-4939-8e61-b21d121522f2 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 932.311373] env[69328]: DEBUG nova.compute.manager [req-823b25dd-cd64-435a-9eb5-bc8db016c15b req-8d616780-6b5a-44ad-a875-1e7470a16656 service nova] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Refreshing instance network info cache due to event network-changed-3a99454a-a8d4-4939-8e61-b21d121522f2. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 932.311592] env[69328]: DEBUG oslo_concurrency.lockutils [req-823b25dd-cd64-435a-9eb5-bc8db016c15b req-8d616780-6b5a-44ad-a875-1e7470a16656 service nova] Acquiring lock "refresh_cache-b21ff3c9-d53a-4065-a271-682c2f1b895d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.381596] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Releasing lock "refresh_cache-b21ff3c9-d53a-4065-a271-682c2f1b895d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 932.381987] env[69328]: DEBUG nova.compute.manager [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Instance network_info: |[{"id": "3a99454a-a8d4-4939-8e61-b21d121522f2", "address": "fa:16:3e:68:17:25", "network": {"id": "5b6ce910-5dc1-49bb-ad4b-fe1a86509fc3", "bridge": "br-int", "label": "tempest-ImagesTestJSON-224847435-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "636412f89c9d488a9cfd6f19ef046efc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1e1e320-ec56-4fcc-b6e9-30aa210d3b36", "external-id": "nsx-vlan-transportzone-447", "segmentation_id": 447, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a99454a-a8", "ovs_interfaceid": "3a99454a-a8d4-4939-8e61-b21d121522f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 932.382511] env[69328]: DEBUG oslo_concurrency.lockutils [req-823b25dd-cd64-435a-9eb5-bc8db016c15b req-8d616780-6b5a-44ad-a875-1e7470a16656 service nova] Acquired lock "refresh_cache-b21ff3c9-d53a-4065-a271-682c2f1b895d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 932.382662] env[69328]: DEBUG nova.network.neutron [req-823b25dd-cd64-435a-9eb5-bc8db016c15b req-8d616780-6b5a-44ad-a875-1e7470a16656 service nova] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Refreshing network info cache for port 3a99454a-a8d4-4939-8e61-b21d121522f2 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 932.384591] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:68:17:25', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd1e1e320-ec56-4fcc-b6e9-30aa210d3b36', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3a99454a-a8d4-4939-8e61-b21d121522f2', 'vif_model': 'vmxnet3'}] {{(pid=69328) 
build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 932.393586] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 932.394258] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 932.394406] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c2aad88b-fe9f-4f37-9a82-4248d8a0f7f1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.418168] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 932.418168] env[69328]: value = "task-3273578" [ 932.418168] env[69328]: _type = "Task" [ 932.418168] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.432403] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273578, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.449730] env[69328]: DEBUG nova.compute.manager [req-be59ea73-0835-48f0-a15e-c9b9d92c9c7e req-edae847b-4ef7-48ea-81ca-9cc909e877f2 service nova] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Received event network-vif-deleted-46bc5af6-cdfc-4468-936e-604560442c91 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 932.449931] env[69328]: INFO nova.compute.manager [req-be59ea73-0835-48f0-a15e-c9b9d92c9c7e req-edae847b-4ef7-48ea-81ca-9cc909e877f2 service nova] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Neutron deleted interface 46bc5af6-cdfc-4468-936e-604560442c91; detaching it from the instance and deleting it from the info cache [ 932.450105] env[69328]: DEBUG nova.network.neutron [req-be59ea73-0835-48f0-a15e-c9b9d92c9c7e req-edae847b-4ef7-48ea-81ca-9cc909e877f2 service nova] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.544053] env[69328]: DEBUG oslo_vmware.api [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': task-3273576, 'name': Rename_Task, 'duration_secs': 0.342538} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.544485] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 932.544818] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6f930a77-14fc-4a5f-9e35-4eb86143c024 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.554876] env[69328]: DEBUG oslo_vmware.api [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Waiting for the task: (returnval){ [ 932.554876] env[69328]: value = "task-3273579" [ 932.554876] env[69328]: _type = "Task" [ 932.554876] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.565158] env[69328]: DEBUG oslo_vmware.api [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': task-3273579, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.716342] env[69328]: DEBUG oslo_concurrency.lockutils [None req-27776706-5241-4665-a52a-b96bfa6a670b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.006s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 932.719080] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.830s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 932.720926] env[69328]: INFO nova.compute.claims [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 932.729884] env[69328]: DEBUG nova.compute.manager [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 932.750078] env[69328]: INFO nova.scheduler.client.report [None req-27776706-5241-4665-a52a-b96bfa6a670b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Deleted allocations for instance d017d08e-5f9e-4d05-8914-3320d4c87c9b [ 932.761487] env[69328]: DEBUG oslo_vmware.api [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273577, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.774204] env[69328]: DEBUG nova.virt.hardware [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 932.774605] env[69328]: DEBUG nova.virt.hardware [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 932.774685] env[69328]: DEBUG nova.virt.hardware [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 932.774860] env[69328]: DEBUG nova.virt.hardware [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 932.775011] env[69328]: DEBUG nova.virt.hardware [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 932.775368] env[69328]: DEBUG nova.virt.hardware [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 932.775684] env[69328]: DEBUG nova.virt.hardware [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 
tempest-DeleteServersTestJSON-1704685797-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 932.775886] env[69328]: DEBUG nova.virt.hardware [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 932.776171] env[69328]: DEBUG nova.virt.hardware [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 932.776393] env[69328]: DEBUG nova.virt.hardware [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 932.776755] env[69328]: DEBUG nova.virt.hardware [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 932.777877] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d818fa52-e023-4ff9-be0f-68ebb833f5f5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.787585] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58b3d448-d0ee-40f7-aff8-e9f579c94fe1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.801806] env[69328]: DEBUG oslo_vmware.api [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52723e69-e79f-1fda-2558-ff4871f5ccf3, 'name': SearchDatastore_Task, 'duration_secs': 0.01408} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.813079] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 932.813477] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 1413dcfe-3570-4657-b811-81a1acc159d1/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318-rescue.vmdk. {{(pid=69328) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 932.814894] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dfe6d13b-ad81-4e2a-a9cb-786e5b4c05b4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.823842] env[69328]: DEBUG oslo_vmware.api [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 932.823842] env[69328]: value = "task-3273580" [ 932.823842] env[69328]: _type = "Task" [ 932.823842] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.833151] env[69328]: DEBUG oslo_vmware.api [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273580, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.929692] env[69328]: DEBUG nova.network.neutron [-] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.930976] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273578, 'name': CreateVM_Task, 'duration_secs': 0.499326} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.933526] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 932.934466] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.934730] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 932.935128] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 932.935727] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8eb5d88-598b-4a5e-afad-dff59c0af149 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.941193] env[69328]: DEBUG oslo_vmware.api [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 932.941193] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f36e16-e693-7ce9-b8c4-d337f1bbba0a" [ 932.941193] env[69328]: _type = "Task" [ 932.941193] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.951846] env[69328]: DEBUG oslo_vmware.api [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f36e16-e693-7ce9-b8c4-d337f1bbba0a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.952447] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0ae9c703-131c-49c9-a0c5-40569dcac471 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.961956] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cec5d164-3bd8-4bd1-a379-ef4aa21ea1da {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.003926] env[69328]: DEBUG nova.compute.manager [req-be59ea73-0835-48f0-a15e-c9b9d92c9c7e req-edae847b-4ef7-48ea-81ca-9cc909e877f2 service nova] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Detach interface failed, port_id=46bc5af6-cdfc-4468-936e-604560442c91, reason: Instance 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494 could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 933.069064] env[69328]: DEBUG oslo_vmware.api [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': task-3273579, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.138098] env[69328]: DEBUG nova.network.neutron [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Successfully updated port: c68f39c4-deae-4739-bc3f-9284775789c4 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 933.204629] env[69328]: DEBUG nova.network.neutron [req-823b25dd-cd64-435a-9eb5-bc8db016c15b req-8d616780-6b5a-44ad-a875-1e7470a16656 service nova] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Updated VIF entry in instance network info cache for port 3a99454a-a8d4-4939-8e61-b21d121522f2. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 933.205027] env[69328]: DEBUG nova.network.neutron [req-823b25dd-cd64-435a-9eb5-bc8db016c15b req-8d616780-6b5a-44ad-a875-1e7470a16656 service nova] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Updating instance_info_cache with network_info: [{"id": "3a99454a-a8d4-4939-8e61-b21d121522f2", "address": "fa:16:3e:68:17:25", "network": {"id": "5b6ce910-5dc1-49bb-ad4b-fe1a86509fc3", "bridge": "br-int", "label": "tempest-ImagesTestJSON-224847435-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "636412f89c9d488a9cfd6f19ef046efc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1e1e320-ec56-4fcc-b6e9-30aa210d3b36", "external-id": "nsx-vlan-transportzone-447", "segmentation_id": 447, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a99454a-a8", "ovs_interfaceid": "3a99454a-a8d4-4939-8e61-b21d121522f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 933.258622] env[69328]: DEBUG oslo_vmware.api [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273577, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.517206} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.260161] env[69328]: DEBUG oslo_concurrency.lockutils [None req-27776706-5241-4665-a52a-b96bfa6a670b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "d017d08e-5f9e-4d05-8914-3320d4c87c9b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.003s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 933.260161] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 76210566-12d7-4f6a-afa1-6329e87e0f85/76210566-12d7-4f6a-afa1-6329e87e0f85.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 933.260319] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 933.260731] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-65ab837c-f890-4332-bee2-e7832577ab16 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.272274] env[69328]: DEBUG oslo_vmware.api [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 933.272274] env[69328]: value = "task-3273581" [ 933.272274] env[69328]: _type = "Task" [ 933.272274] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.284988] env[69328]: DEBUG oslo_vmware.api [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273581, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.337160] env[69328]: DEBUG oslo_vmware.api [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273580, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.512435} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.337448] env[69328]: INFO nova.virt.vmwareapi.ds_util [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 1413dcfe-3570-4657-b811-81a1acc159d1/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318-rescue.vmdk. 
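The CopyVirtualDisk_Task and ExtendVirtualDisk_Task entries above all follow the same submit-then-poll pattern: the driver invokes a vSphere method, receives a task handle (e.g. task-3273580), and then polls the task's state, which is what produces the repeated "progress is N%." and "completed successfully." lines. A minimal sketch of that loop follows; `get_task_info` and the `TaskInfo` shape are hypothetical stand-ins for the real oslo_vmware session plumbing, and the 1048576 passed to the extend call is simply the m1.nano flavor's 1 GiB root disk expressed in KiB.

```python
import time
from dataclasses import dataclass
from typing import Callable, Optional

# Hypothetical, simplified view of a vSphere task -- not the real oslo_vmware types.
@dataclass
class TaskInfo:
    state: str                 # "queued" | "running" | "success" | "error"
    progress: int = 0          # 0..100, as echoed in the "progress is N%" lines
    error: Optional[str] = None

def wait_for_task(get_task_info: Callable[[str], TaskInfo],
                  task_id: str,
                  poll_interval: float = 0.5) -> TaskInfo:
    """Poll a task until it reaches a terminal state, logging progress each pass."""
    while True:
        info = get_task_info(task_id)              # one property-collector round trip
        if info.state in ("queued", "running"):
            print(f"Task: {task_id} progress is {info.progress}%.")
            time.sleep(poll_interval)
            continue
        if info.state == "success":
            print(f"Task: {task_id} completed successfully.")
            return info
        raise RuntimeError(f"Task {task_id} failed: {info.error}")

# The ExtendVirtualDisk_Task above grows the root disk to the flavor's root_gb in KiB:
root_gb = 1
requested_size_kb = root_gb * 1024 * 1024          # 1048576, the value seen in the log
```

Further up, the _get_desirable_cpu_topologies lines show a similar derivation for the vCPU layout: with no flavor or image topology constraints (limits 0:0:0, maximums 65536), the only valid split of 1 vCPU is sockets=1, cores=1, threads=1. The sketch below enumerates (sockets, cores, threads) factorizations in the spirit of those log lines; it is an illustration of the enumeration, not Nova's actual implementation in nova/virt/hardware.py.

```python
def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    """Enumerate (sockets, cores, threads) splits of vcpus within the given limits,
    mirroring the 'Build topologies ... Got N possible topologies' log lines."""
    topos = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                topos.append((sockets, cores, threads))
    return topos

print(possible_topologies(1, 65536, 65536, 65536))   # [(1, 1, 1)] -- as logged
```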
[ 933.338276] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03147b4d-47b2-49f7-8660-bade0578f0d0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.367013] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] 1413dcfe-3570-4657-b811-81a1acc159d1/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318-rescue.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 933.367493] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cf5f42e6-db52-420c-9ce8-2fe528007b11 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.391179] env[69328]: DEBUG oslo_vmware.api [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 933.391179] env[69328]: value = "task-3273582" [ 933.391179] env[69328]: _type = "Task" [ 933.391179] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.401193] env[69328]: DEBUG oslo_vmware.api [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273582, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.434726] env[69328]: INFO nova.compute.manager [-] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Took 1.33 seconds to deallocate network for instance. [ 933.460655] env[69328]: DEBUG oslo_vmware.api [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f36e16-e693-7ce9-b8c4-d337f1bbba0a, 'name': SearchDatastore_Task, 'duration_secs': 0.010507} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.461013] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 933.461272] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 933.461505] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.465020] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 933.465020] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 933.465020] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6e236c14-bb87-4aa7-836e-a5bac2ef450a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.471924] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 933.472205] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 933.472865] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ee0243e-c0c4-4725-8795-ec2393b3cef0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.478452] env[69328]: DEBUG oslo_vmware.api [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 933.478452] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a1520c-7bf9-a646-4a26-d8c257cc5359" [ 933.478452] env[69328]: _type = "Task" [ 933.478452] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.487763] env[69328]: DEBUG oslo_vmware.api [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a1520c-7bf9-a646-4a26-d8c257cc5359, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.566653] env[69328]: DEBUG oslo_vmware.api [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': task-3273579, 'name': PowerOnVM_Task, 'duration_secs': 0.684001} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.566925] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 933.567178] env[69328]: INFO nova.compute.manager [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Took 10.24 seconds to spawn the instance on the hypervisor. 
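The lockutils lines around "[datastore1] devstack-image-cache_base/a6ec8ae9-...vmdk" are the image-cache check that precedes each of these disk copies: the request serializes on the cached VMDK's name, creates the devstack-image-cache_base folder if it is missing (FileManager.MakeDirectory), and runs a HostDatastoreBrowser.SearchDatastore_Task to decide whether the image still has to be fetched before it is copied into the instance's own folder. A rough sketch of that flow, using oslo_concurrency's lock() context manager (the helper that emits the Acquiring/Acquired/Releasing lines) and hypothetical `mkdir`, `search_datastore` and `fetch_image` callables in place of the real datastore calls:

```python
from oslo_concurrency import lockutils

IMAGE_CACHE = "[datastore1] devstack-image-cache_base"

def ensure_cached_image(image_id, mkdir, search_datastore, fetch_image):
    """Serialize on the cached VMDK, create the cache folder if needed, and
    only fetch the image when the datastore search misses. The three callables
    stand in for MakeDirectory, SearchDatastore_Task and the Glance download."""
    cached_vmdk = f"{IMAGE_CACHE}/{image_id}/{image_id}.vmdk"
    with lockutils.lock(cached_vmdk):          # logs Acquiring/Acquired/Releasing lock
        mkdir(IMAGE_CACHE)                     # MakeDirectory; harmless if it exists
        if not search_datastore(cached_vmdk):  # SearchDatastore_Task
            fetch_image(image_id, cached_vmdk) # only on a cache miss
    return cached_vmdk
```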
[ 933.567324] env[69328]: DEBUG nova.compute.manager [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 933.568115] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79976d84-8667-4362-a53d-a01d74bec266 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.640657] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "refresh_cache-0a485411-3206-4674-90e4-58df4a8b755a" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.640782] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquired lock "refresh_cache-0a485411-3206-4674-90e4-58df4a8b755a" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 933.641033] env[69328]: DEBUG nova.network.neutron [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 933.708822] env[69328]: DEBUG oslo_concurrency.lockutils [req-823b25dd-cd64-435a-9eb5-bc8db016c15b req-8d616780-6b5a-44ad-a875-1e7470a16656 service nova] Releasing lock "refresh_cache-b21ff3c9-d53a-4065-a271-682c2f1b895d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 933.785220] env[69328]: DEBUG oslo_vmware.api [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273581, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.136649} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.785492] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 933.786312] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b05aca5-2d8b-44ec-a7bc-5edd415bc266 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.811727] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] 76210566-12d7-4f6a-afa1-6329e87e0f85/76210566-12d7-4f6a-afa1-6329e87e0f85.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 933.814355] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f873ab4c-d5f6-4840-8483-a4ef61d700b5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.839265] env[69328]: DEBUG oslo_vmware.api [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 933.839265] env[69328]: value = "task-3273583" [ 933.839265] env[69328]: _type = "Task" [ 933.839265] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.849566] env[69328]: DEBUG oslo_vmware.api [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273583, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.900991] env[69328]: DEBUG oslo_vmware.api [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273582, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.945133] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7b54d2e5-71f6-4aae-aebe-2a4213df214c tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 933.989013] env[69328]: DEBUG oslo_vmware.api [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a1520c-7bf9-a646-4a26-d8c257cc5359, 'name': SearchDatastore_Task, 'duration_secs': 0.009658} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.992137] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03258552-fb8a-4231-ac91-bc1e92ea1d20 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.997602] env[69328]: DEBUG oslo_vmware.api [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 933.997602] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]525c69ca-8fc3-0c47-246f-ca4e6ec62e0c" [ 933.997602] env[69328]: _type = "Task" [ 933.997602] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.007283] env[69328]: DEBUG oslo_vmware.api [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]525c69ca-8fc3-0c47-246f-ca4e6ec62e0c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.075027] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67c9a8a7-7d42-4a50-babe-2c8866cafb0d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.085022] env[69328]: INFO nova.compute.manager [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Took 45.37 seconds to build instance. 
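The inventory dictionary pushed into the ProviderTree above is what resource claims such as the one for instance 82e27131-b401-4885-83fb-825e5c8e2444 are checked against. Placement treats a resource class's usable capacity as (total - reserved) * allocation_ratio, with min_unit/max_unit/step_size bounding any single allocation. A quick worked example over the logged values, assuming that standard capacity rule:

```python
# Inventory as logged for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 115},
}

def capacity(inv):
    """Usable capacity: (total - reserved) * allocation_ratio."""
    return (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]

for rc, inv in inventory.items():
    print(f"{rc}: capacity={capacity(inv):.0f}, largest single allocation={inv['max_unit']}")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400 -- so a 1 vCPU / 192 MB / 1 GB request
# (the m1.nano flavor used elsewhere in this run) fits easily, matching the
# "Claim successful" entry earlier in the log.
```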
[ 934.091491] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebc66337-86b2-4d11-90d7-6a6e11e1b651 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.124234] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-532d5164-006e-4a38-bf80-8ccf426297e0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.132439] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efb4a032-f544-4274-b9d6-eca59352b707 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.148161] env[69328]: DEBUG nova.compute.provider_tree [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 934.177539] env[69328]: DEBUG nova.network.neutron [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 934.352486] env[69328]: DEBUG oslo_vmware.api [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273583, 'name': ReconfigVM_Task, 'duration_secs': 0.346197} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.353459] env[69328]: DEBUG nova.network.neutron [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Updating instance_info_cache with network_info: [{"id": "c68f39c4-deae-4739-bc3f-9284775789c4", "address": "fa:16:3e:ea:9c:3d", "network": {"id": "620f277d-ca49-41da-83a0-afb8c393e26e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1223326239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdc479a290524130b9d17e627a64b65a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc68f39c4-de", "ovs_interfaceid": "c68f39c4-deae-4739-bc3f-9284775789c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.354792] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Reconfigured VM instance instance-0000004c to attach disk [datastore1] 76210566-12d7-4f6a-afa1-6329e87e0f85/76210566-12d7-4f6a-afa1-6329e87e0f85.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 934.355486] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-97eb4917-bbf9-40ec-8705-3c987da451b6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.363293] env[69328]: DEBUG oslo_vmware.api [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 934.363293] env[69328]: value = "task-3273584" [ 934.363293] env[69328]: _type = "Task" [ 934.363293] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.373432] env[69328]: DEBUG oslo_vmware.api [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273584, 'name': Rename_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.390904] env[69328]: DEBUG nova.compute.manager [req-f35d43fb-bcc9-41c9-a9e0-4f670932c38d req-845fce42-1ea7-4e96-9359-05ae5fd17aa7 service nova] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Received event network-vif-plugged-c68f39c4-deae-4739-bc3f-9284775789c4 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 934.391224] env[69328]: DEBUG oslo_concurrency.lockutils [req-f35d43fb-bcc9-41c9-a9e0-4f670932c38d req-845fce42-1ea7-4e96-9359-05ae5fd17aa7 service nova] Acquiring lock "0a485411-3206-4674-90e4-58df4a8b755a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 934.391360] env[69328]: DEBUG oslo_concurrency.lockutils [req-f35d43fb-bcc9-41c9-a9e0-4f670932c38d req-845fce42-1ea7-4e96-9359-05ae5fd17aa7 service nova] Lock "0a485411-3206-4674-90e4-58df4a8b755a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 934.391596] env[69328]: DEBUG oslo_concurrency.lockutils [req-f35d43fb-bcc9-41c9-a9e0-4f670932c38d req-845fce42-1ea7-4e96-9359-05ae5fd17aa7 service nova] Lock "0a485411-3206-4674-90e4-58df4a8b755a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 934.391904] env[69328]: DEBUG nova.compute.manager [req-f35d43fb-bcc9-41c9-a9e0-4f670932c38d req-845fce42-1ea7-4e96-9359-05ae5fd17aa7 service nova] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] No waiting events found dispatching network-vif-plugged-c68f39c4-deae-4739-bc3f-9284775789c4 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 934.391943] env[69328]: WARNING nova.compute.manager [req-f35d43fb-bcc9-41c9-a9e0-4f670932c38d req-845fce42-1ea7-4e96-9359-05ae5fd17aa7 service nova] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Received unexpected event network-vif-plugged-c68f39c4-deae-4739-bc3f-9284775789c4 for instance with vm_state building and task_state spawning. [ 934.392194] env[69328]: DEBUG nova.compute.manager [req-f35d43fb-bcc9-41c9-a9e0-4f670932c38d req-845fce42-1ea7-4e96-9359-05ae5fd17aa7 service nova] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Received event network-changed-c68f39c4-deae-4739-bc3f-9284775789c4 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 934.392315] env[69328]: DEBUG nova.compute.manager [req-f35d43fb-bcc9-41c9-a9e0-4f670932c38d req-845fce42-1ea7-4e96-9359-05ae5fd17aa7 service nova] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Refreshing instance network info cache due to event network-changed-c68f39c4-deae-4739-bc3f-9284775789c4. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 934.392421] env[69328]: DEBUG oslo_concurrency.lockutils [req-f35d43fb-bcc9-41c9-a9e0-4f670932c38d req-845fce42-1ea7-4e96-9359-05ae5fd17aa7 service nova] Acquiring lock "refresh_cache-0a485411-3206-4674-90e4-58df4a8b755a" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 934.403991] env[69328]: DEBUG oslo_vmware.api [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273582, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.512716] env[69328]: DEBUG oslo_vmware.api [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]525c69ca-8fc3-0c47-246f-ca4e6ec62e0c, 'name': SearchDatastore_Task, 'duration_secs': 0.009968} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.513012] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 934.513310] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] b21ff3c9-d53a-4065-a271-682c2f1b895d/b21ff3c9-d53a-4065-a271-682c2f1b895d.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 934.513566] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2bc98800-2050-4be4-9de5-335b5abd306f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.521591] env[69328]: DEBUG oslo_vmware.api [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 934.521591] env[69328]: value = "task-3273585" [ 934.521591] env[69328]: _type = "Task" [ 934.521591] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.531071] env[69328]: DEBUG oslo_vmware.api [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273585, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.586381] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b9cc33d6-28bf-4282-8f4d-5d3d6319616c tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Lock "1f568ba1-8591-499b-b1ee-da16e26f81fc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.883s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 934.683018] env[69328]: DEBUG nova.scheduler.client.report [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Updated inventory for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with generation 107 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 934.683347] env[69328]: DEBUG nova.compute.provider_tree [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Updating resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e generation from 107 to 108 during operation: update_inventory {{(pid=69328) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 934.683537] env[69328]: DEBUG nova.compute.provider_tree [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 934.856346] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Releasing lock "refresh_cache-0a485411-3206-4674-90e4-58df4a8b755a" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 934.856840] env[69328]: DEBUG nova.compute.manager [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Instance network_info: |[{"id": "c68f39c4-deae-4739-bc3f-9284775789c4", "address": "fa:16:3e:ea:9c:3d", "network": {"id": "620f277d-ca49-41da-83a0-afb8c393e26e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1223326239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": 
[{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdc479a290524130b9d17e627a64b65a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc68f39c4-de", "ovs_interfaceid": "c68f39c4-deae-4739-bc3f-9284775789c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 934.857523] env[69328]: DEBUG oslo_concurrency.lockutils [req-f35d43fb-bcc9-41c9-a9e0-4f670932c38d req-845fce42-1ea7-4e96-9359-05ae5fd17aa7 service nova] Acquired lock "refresh_cache-0a485411-3206-4674-90e4-58df4a8b755a" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 934.857523] env[69328]: DEBUG nova.network.neutron [req-f35d43fb-bcc9-41c9-a9e0-4f670932c38d req-845fce42-1ea7-4e96-9359-05ae5fd17aa7 service nova] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Refreshing network info cache for port c68f39c4-deae-4739-bc3f-9284775789c4 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 934.859045] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ea:9c:3d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '86a35d07-53d3-46b3-92cb-ae34236c0f41', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c68f39c4-deae-4739-bc3f-9284775789c4', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 934.867209] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 934.867709] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 934.872063] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4ab05395-1f51-4f59-a194-e18ba97657bc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.895847] env[69328]: DEBUG oslo_vmware.api [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273584, 'name': Rename_Task} progress is 99%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.900187] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 934.900187] env[69328]: value = "task-3273586" [ 934.900187] env[69328]: _type = "Task" [ 934.900187] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.908444] env[69328]: DEBUG oslo_vmware.api [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273582, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.916287] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273586, 'name': CreateVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.008508] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cc57710c-7228-448c-9e4f-bc5e76ac2170 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Acquiring lock "1f568ba1-8591-499b-b1ee-da16e26f81fc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 935.008964] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cc57710c-7228-448c-9e4f-bc5e76ac2170 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Lock "1f568ba1-8591-499b-b1ee-da16e26f81fc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 935.009320] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cc57710c-7228-448c-9e4f-bc5e76ac2170 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Acquiring lock "1f568ba1-8591-499b-b1ee-da16e26f81fc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 935.009632] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cc57710c-7228-448c-9e4f-bc5e76ac2170 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Lock "1f568ba1-8591-499b-b1ee-da16e26f81fc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 935.009904] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cc57710c-7228-448c-9e4f-bc5e76ac2170 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Lock "1f568ba1-8591-499b-b1ee-da16e26f81fc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 935.012541] env[69328]: INFO nova.compute.manager [None req-cc57710c-7228-448c-9e4f-bc5e76ac2170 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Terminating instance [ 
935.034964] env[69328]: DEBUG oslo_vmware.api [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273585, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.495168} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.035317] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] b21ff3c9-d53a-4065-a271-682c2f1b895d/b21ff3c9-d53a-4065-a271-682c2f1b895d.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 935.035518] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 935.035812] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3ac48df9-b65b-4faf-8193-67dcea639632 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.043457] env[69328]: DEBUG oslo_vmware.api [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 935.043457] env[69328]: value = "task-3273587" [ 935.043457] env[69328]: _type = "Task" [ 935.043457] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.054443] env[69328]: DEBUG oslo_vmware.api [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273587, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.189091] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.470s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 935.189691] env[69328]: DEBUG nova.compute.manager [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 935.192360] env[69328]: DEBUG oslo_concurrency.lockutils [None req-039d3904-cd51-4387-9b2a-bb7e74f42aee tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.837s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 935.192596] env[69328]: DEBUG nova.objects.instance [None req-039d3904-cd51-4387-9b2a-bb7e74f42aee tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Lazy-loading 'resources' on Instance uuid d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 935.380762] env[69328]: DEBUG oslo_vmware.api [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273584, 'name': Rename_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.419449] env[69328]: DEBUG oslo_vmware.api [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273582, 'name': ReconfigVM_Task, 'duration_secs': 1.647566} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.419726] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273586, 'name': CreateVM_Task, 'duration_secs': 0.423501} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.419936] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Reconfigured VM instance instance-00000049 to attach disk [datastore1] 1413dcfe-3570-4657-b811-81a1acc159d1/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318-rescue.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 935.422674] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 935.423508] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc3e47c3-3296-44e1-9d45-8d102d2fe22a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.426525] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.426691] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 935.427055] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 935.429611] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-688c813c-2737-4f5b-baa0-07b01b5fb9f4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.435457] env[69328]: DEBUG oslo_vmware.api [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 935.435457] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f96511-837e-4c90-0235-6bfa61439b10" [ 935.435457] env[69328]: _type = "Task" [ 935.435457] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.458993] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ceeb3bdc-78c2-4822-b5cb-ac13092d9dd8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.474868] env[69328]: DEBUG oslo_vmware.api [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f96511-837e-4c90-0235-6bfa61439b10, 'name': SearchDatastore_Task, 'duration_secs': 0.010587} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.476128] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 935.476364] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 935.476647] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.476743] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 935.476906] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 935.477238] env[69328]: DEBUG oslo_vmware.api [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 935.477238] env[69328]: value = "task-3273588" [ 935.477238] env[69328]: _type = "Task" [ 935.477238] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.477412] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-50744ee3-e1e1-4160-b8da-cf8c4fbbb5d8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.487111] env[69328]: DEBUG oslo_vmware.api [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273588, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.488720] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 935.488914] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 935.489937] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07c97c01-771b-403f-950d-b9d83e262c9f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.495209] env[69328]: DEBUG oslo_vmware.api [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 935.495209] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52435cab-75e8-dde5-6f24-33d75acb000c" [ 935.495209] env[69328]: _type = "Task" [ 935.495209] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.503720] env[69328]: DEBUG oslo_vmware.api [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52435cab-75e8-dde5-6f24-33d75acb000c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.518250] env[69328]: DEBUG nova.compute.manager [None req-cc57710c-7228-448c-9e4f-bc5e76ac2170 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 935.518379] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-cc57710c-7228-448c-9e4f-bc5e76ac2170 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 935.519194] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a891bc89-6d4e-4239-863f-4a563809e593 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.526820] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc57710c-7228-448c-9e4f-bc5e76ac2170 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 935.527111] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b8f61848-5a9c-44eb-a265-ff43b1ea68a3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.533708] env[69328]: DEBUG oslo_vmware.api [None req-cc57710c-7228-448c-9e4f-bc5e76ac2170 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Waiting for the task: (returnval){ [ 935.533708] env[69328]: value = "task-3273589" [ 935.533708] env[69328]: _type = "Task" [ 935.533708] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.542736] env[69328]: DEBUG oslo_vmware.api [None req-cc57710c-7228-448c-9e4f-bc5e76ac2170 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': task-3273589, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.553778] env[69328]: DEBUG oslo_vmware.api [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273587, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075727} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.554077] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 935.554879] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0348587-4e22-46ad-81bc-b9ee76d48971 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.576942] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] b21ff3c9-d53a-4065-a271-682c2f1b895d/b21ff3c9-d53a-4065-a271-682c2f1b895d.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 935.579690] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ffff59ec-2bb9-46d4-a17a-fba8afb8209e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.603814] env[69328]: DEBUG oslo_vmware.api [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 935.603814] env[69328]: value = "task-3273590" [ 935.603814] env[69328]: _type = "Task" [ 935.603814] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.613903] env[69328]: DEBUG oslo_vmware.api [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273590, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.695270] env[69328]: DEBUG nova.network.neutron [req-f35d43fb-bcc9-41c9-a9e0-4f670932c38d req-845fce42-1ea7-4e96-9359-05ae5fd17aa7 service nova] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Updated VIF entry in instance network info cache for port c68f39c4-deae-4739-bc3f-9284775789c4. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 935.695855] env[69328]: DEBUG nova.network.neutron [req-f35d43fb-bcc9-41c9-a9e0-4f670932c38d req-845fce42-1ea7-4e96-9359-05ae5fd17aa7 service nova] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Updating instance_info_cache with network_info: [{"id": "c68f39c4-deae-4739-bc3f-9284775789c4", "address": "fa:16:3e:ea:9c:3d", "network": {"id": "620f277d-ca49-41da-83a0-afb8c393e26e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1223326239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdc479a290524130b9d17e627a64b65a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc68f39c4-de", "ovs_interfaceid": "c68f39c4-deae-4739-bc3f-9284775789c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 935.698190] env[69328]: DEBUG nova.compute.utils [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 935.703420] env[69328]: DEBUG nova.compute.manager [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 935.703585] env[69328]: DEBUG nova.network.neutron [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 935.766681] env[69328]: DEBUG nova.policy [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f4dc7c56b3cb4e5b943b54ebe16a23df', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1393040bf5304571ae4b66d0a4ee7b6e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 935.885415] env[69328]: DEBUG oslo_vmware.api [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273584, 'name': Rename_Task, 'duration_secs': 1.232243} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.886279] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 935.886711] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-862a246e-8450-48d9-991d-f5064f06f8e1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.897740] env[69328]: DEBUG oslo_vmware.api [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 935.897740] env[69328]: value = "task-3273591" [ 935.897740] env[69328]: _type = "Task" [ 935.897740] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.908308] env[69328]: DEBUG oslo_vmware.api [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273591, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.991085] env[69328]: DEBUG oslo_vmware.api [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273588, 'name': ReconfigVM_Task, 'duration_secs': 0.306766} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.991296] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 935.993900] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-805dfb27-8856-4d43-b241-df51c5b1af00 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.004635] env[69328]: DEBUG oslo_vmware.api [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52435cab-75e8-dde5-6f24-33d75acb000c, 'name': SearchDatastore_Task, 'duration_secs': 0.010002} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.006501] env[69328]: DEBUG oslo_vmware.api [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 936.006501] env[69328]: value = "task-3273592" [ 936.006501] env[69328]: _type = "Task" [ 936.006501] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.006709] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8408e8d7-88e7-4175-b476-758fb7fce303 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.018150] env[69328]: DEBUG oslo_vmware.api [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 936.018150] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ba2f21-12d8-6844-25c3-d86a266dfb7f" [ 936.018150] env[69328]: _type = "Task" [ 936.018150] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.021955] env[69328]: DEBUG oslo_vmware.api [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273592, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.035099] env[69328]: DEBUG oslo_vmware.api [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ba2f21-12d8-6844-25c3-d86a266dfb7f, 'name': SearchDatastore_Task, 'duration_secs': 0.011428} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.038647] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 936.038647] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 0a485411-3206-4674-90e4-58df4a8b755a/0a485411-3206-4674-90e4-58df4a8b755a.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 936.038847] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b8535e58-abc2-43ef-82a9-7380b9c5c3d8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.049250] env[69328]: DEBUG oslo_vmware.api [None req-cc57710c-7228-448c-9e4f-bc5e76ac2170 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': task-3273589, 'name': PowerOffVM_Task, 'duration_secs': 0.301297} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.050594] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc57710c-7228-448c-9e4f-bc5e76ac2170 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 936.050800] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-cc57710c-7228-448c-9e4f-bc5e76ac2170 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 936.051240] env[69328]: DEBUG oslo_vmware.api [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 936.051240] env[69328]: value = "task-3273593" [ 936.051240] env[69328]: _type = "Task" [ 936.051240] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.051366] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5e2e258c-cf4e-4dcf-81a3-9323c0d29e6b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.066099] env[69328]: DEBUG oslo_vmware.api [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273593, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.093438] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ba5a800-85e2-481d-b085-5e259f72b8cb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.102882] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85a91d7f-825b-44df-b199-7cb2241ff685 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.116998] env[69328]: DEBUG oslo_vmware.api [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273590, 'name': ReconfigVM_Task, 'duration_secs': 0.298059} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.144430] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Reconfigured VM instance instance-0000004d to attach disk [datastore1] b21ff3c9-d53a-4065-a271-682c2f1b895d/b21ff3c9-d53a-4065-a271-682c2f1b895d.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 936.145229] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cbed3959-f892-4b31-bfe9-b5ac45c08621 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.147669] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3133c615-fad2-44a0-b552-32334ffe5a7f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.156975] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba3f016c-6fb1-4cdb-82d1-9e0043e26cc1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.161068] env[69328]: DEBUG oslo_vmware.api [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 936.161068] env[69328]: value = "task-3273595" [ 936.161068] env[69328]: _type = "Task" [ 936.161068] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.173130] env[69328]: DEBUG nova.compute.provider_tree [None req-039d3904-cd51-4387-9b2a-bb7e74f42aee tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 936.181215] env[69328]: DEBUG oslo_vmware.api [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273595, 'name': Rename_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.199743] env[69328]: DEBUG nova.network.neutron [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Successfully created port: 846c98df-5570-4e8b-8e8b-353bf9825281 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 936.204206] env[69328]: DEBUG nova.compute.manager [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 936.207716] env[69328]: DEBUG oslo_concurrency.lockutils [req-f35d43fb-bcc9-41c9-a9e0-4f670932c38d req-845fce42-1ea7-4e96-9359-05ae5fd17aa7 service nova] Releasing lock "refresh_cache-0a485411-3206-4674-90e4-58df4a8b755a" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 936.223877] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-cc57710c-7228-448c-9e4f-bc5e76ac2170 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 936.223877] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-cc57710c-7228-448c-9e4f-bc5e76ac2170 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 936.223877] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc57710c-7228-448c-9e4f-bc5e76ac2170 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Deleting the datastore file [datastore1] 1f568ba1-8591-499b-b1ee-da16e26f81fc {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 936.226164] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-06adecb0-cd5f-4e29-8821-b9b8ef88694d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.235053] env[69328]: DEBUG oslo_vmware.api [None req-cc57710c-7228-448c-9e4f-bc5e76ac2170 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Waiting for the task: (returnval){ [ 936.235053] env[69328]: value = "task-3273596" [ 936.235053] env[69328]: _type = "Task" [ 936.235053] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.248019] env[69328]: DEBUG oslo_vmware.api [None req-cc57710c-7228-448c-9e4f-bc5e76ac2170 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': task-3273596, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.410805] env[69328]: DEBUG oslo_vmware.api [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273591, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.525112] env[69328]: DEBUG oslo_vmware.api [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273592, 'name': PowerOnVM_Task, 'duration_secs': 0.488278} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.525521] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 936.529705] env[69328]: DEBUG nova.compute.manager [None req-6c230130-bc6e-4335-a78d-25907c763e95 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 936.530886] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d818451-1cb9-403a-bc89-57e5fa05a07c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.564669] env[69328]: DEBUG oslo_vmware.api [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273593, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.50871} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.564817] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 0a485411-3206-4674-90e4-58df4a8b755a/0a485411-3206-4674-90e4-58df4a8b755a.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 936.565050] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 936.565337] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e9e117ae-4efe-4fc4-b36a-7aa331509604 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.573149] env[69328]: DEBUG oslo_vmware.api [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 936.573149] env[69328]: value = "task-3273597" [ 936.573149] env[69328]: _type = "Task" [ 936.573149] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.584888] env[69328]: DEBUG oslo_vmware.api [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273597, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.671051] env[69328]: DEBUG oslo_vmware.api [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273595, 'name': Rename_Task, 'duration_secs': 0.2653} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.671349] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 936.671624] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-58d52028-7615-472e-a363-ea59e1daa6e6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.676291] env[69328]: DEBUG nova.scheduler.client.report [None req-039d3904-cd51-4387-9b2a-bb7e74f42aee tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 936.680656] env[69328]: DEBUG oslo_vmware.api [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 936.680656] env[69328]: value = "task-3273598" [ 936.680656] env[69328]: _type = "Task" [ 936.680656] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.689118] env[69328]: DEBUG oslo_vmware.api [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273598, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.747045] env[69328]: DEBUG oslo_vmware.api [None req-cc57710c-7228-448c-9e4f-bc5e76ac2170 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Task: {'id': task-3273596, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.431757} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.747805] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc57710c-7228-448c-9e4f-bc5e76ac2170 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 936.748012] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-cc57710c-7228-448c-9e4f-bc5e76ac2170 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 936.748197] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-cc57710c-7228-448c-9e4f-bc5e76ac2170 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 936.748372] env[69328]: INFO nova.compute.manager [None req-cc57710c-7228-448c-9e4f-bc5e76ac2170 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Took 1.23 seconds to destroy the instance on the hypervisor. [ 936.748631] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cc57710c-7228-448c-9e4f-bc5e76ac2170 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 936.748832] env[69328]: DEBUG nova.compute.manager [-] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 936.748920] env[69328]: DEBUG nova.network.neutron [-] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 936.910499] env[69328]: DEBUG oslo_vmware.api [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273591, 'name': PowerOnVM_Task, 'duration_secs': 0.620713} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.910499] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 936.910499] env[69328]: INFO nova.compute.manager [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Took 9.07 seconds to spawn the instance on the hypervisor. 
[ 936.910499] env[69328]: DEBUG nova.compute.manager [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 936.911194] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb20fc35-2db9-49d1-92df-f2b3efb86d24 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.054649] env[69328]: DEBUG nova.compute.manager [req-40aa028d-02e9-4e58-bb1f-db061468976a req-31fd20bf-53f1-4fc4-9ab1-b2a3ebf31960 service nova] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Received event network-vif-deleted-43596910-7f9b-47c0-972f-1dd3d779373c {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 937.054845] env[69328]: INFO nova.compute.manager [req-40aa028d-02e9-4e58-bb1f-db061468976a req-31fd20bf-53f1-4fc4-9ab1-b2a3ebf31960 service nova] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Neutron deleted interface 43596910-7f9b-47c0-972f-1dd3d779373c; detaching it from the instance and deleting it from the info cache [ 937.055128] env[69328]: DEBUG nova.network.neutron [req-40aa028d-02e9-4e58-bb1f-db061468976a req-31fd20bf-53f1-4fc4-9ab1-b2a3ebf31960 service nova] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Updating instance_info_cache with network_info: [{"id": "958e374d-7706-40d9-aac0-ee00bd3140f0", "address": "fa:16:3e:27:42:2c", "network": {"id": "503b721a-0340-4a37-a93c-2106abfa16b5", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-893160243", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.189", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "f71a072b33154efe9636b50e25f93381", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bf86b133-2b7b-4cab-8f6f-5a0856d34c7b", "external-id": "nsx-vlan-transportzone-557", "segmentation_id": 557, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap958e374d-77", "ovs_interfaceid": "958e374d-7706-40d9-aac0-ee00bd3140f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.083013] env[69328]: DEBUG oslo_vmware.api [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273597, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070291} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.083285] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 937.084100] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20e2aebd-4cc2-4c3b-b9d5-808a2cf7611d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.106557] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] 0a485411-3206-4674-90e4-58df4a8b755a/0a485411-3206-4674-90e4-58df4a8b755a.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 937.107113] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1081492f-a4d0-4c59-b59c-37cbe5e97742 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.127126] env[69328]: DEBUG oslo_vmware.api [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 937.127126] env[69328]: value = "task-3273599" [ 937.127126] env[69328]: _type = "Task" [ 937.127126] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.135448] env[69328]: DEBUG oslo_vmware.api [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273599, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.186492] env[69328]: DEBUG oslo_concurrency.lockutils [None req-039d3904-cd51-4387-9b2a-bb7e74f42aee tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.994s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 937.189080] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0790747f-001b-4009-889e-af44a3ab0781 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.286s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 937.189313] env[69328]: DEBUG nova.objects.instance [None req-0790747f-001b-4009-889e-af44a3ab0781 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lazy-loading 'resources' on Instance uuid 20f750d7-1914-49bb-802f-464a30ffcf3a {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 937.196566] env[69328]: DEBUG oslo_vmware.api [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273598, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.210965] env[69328]: INFO nova.scheduler.client.report [None req-039d3904-cd51-4387-9b2a-bb7e74f42aee tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Deleted allocations for instance d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8 [ 937.215284] env[69328]: DEBUG nova.compute.manager [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 937.243017] env[69328]: DEBUG nova.virt.hardware [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 937.243221] env[69328]: DEBUG nova.virt.hardware [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 937.243707] env[69328]: DEBUG nova.virt.hardware [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 937.243707] env[69328]: DEBUG nova.virt.hardware [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 937.243854] env[69328]: DEBUG nova.virt.hardware [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 937.243915] env[69328]: DEBUG nova.virt.hardware [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 937.244103] env[69328]: DEBUG nova.virt.hardware [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 937.244269] env[69328]: DEBUG nova.virt.hardware [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 937.245022] env[69328]: DEBUG 
nova.virt.hardware [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 937.245022] env[69328]: DEBUG nova.virt.hardware [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 937.245022] env[69328]: DEBUG nova.virt.hardware [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 937.245664] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f182d275-d9e6-43ab-aa8f-144b10d96d98 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.255387] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d79689c2-bb33-4971-93fd-53cc3abed497 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.434904] env[69328]: INFO nova.compute.manager [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Took 32.16 seconds to build instance. [ 937.558875] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-161cefe1-58bb-407d-8965-40a17524cf98 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.570775] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56c7f510-752c-42b2-986d-492bb838e627 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.610441] env[69328]: DEBUG nova.compute.manager [req-40aa028d-02e9-4e58-bb1f-db061468976a req-31fd20bf-53f1-4fc4-9ab1-b2a3ebf31960 service nova] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Detach interface failed, port_id=43596910-7f9b-47c0-972f-1dd3d779373c, reason: Instance 1f568ba1-8591-499b-b1ee-da16e26f81fc could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 937.638201] env[69328]: DEBUG oslo_vmware.api [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273599, 'name': ReconfigVM_Task, 'duration_secs': 0.278667} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.638608] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Reconfigured VM instance instance-0000004e to attach disk [datastore1] 0a485411-3206-4674-90e4-58df4a8b755a/0a485411-3206-4674-90e4-58df4a8b755a.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 937.639247] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6e010f70-89bb-459e-bc0e-8dfb90065c0f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.661047] env[69328]: DEBUG oslo_vmware.api [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 937.661047] env[69328]: value = "task-3273600" [ 937.661047] env[69328]: _type = "Task" [ 937.661047] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.670086] env[69328]: DEBUG oslo_vmware.api [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273600, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.689590] env[69328]: DEBUG nova.network.neutron [-] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.692016] env[69328]: DEBUG oslo_vmware.api [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273598, 'name': PowerOnVM_Task, 'duration_secs': 0.998824} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.694925] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 937.694983] env[69328]: INFO nova.compute.manager [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Took 7.37 seconds to spawn the instance on the hypervisor. 
[ 937.695183] env[69328]: DEBUG nova.compute.manager [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 937.696285] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9757df8-f094-4fa2-ba1b-1bf1c3247704 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.722048] env[69328]: DEBUG oslo_concurrency.lockutils [None req-039d3904-cd51-4387-9b2a-bb7e74f42aee tempest-MigrationsAdminTest-499172505 tempest-MigrationsAdminTest-499172505-project-member] Lock "d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.883s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 937.911270] env[69328]: DEBUG nova.compute.manager [req-73f5ded2-d97c-496d-b411-c794aa1a1971 req-1f798822-46d3-49a9-b596-5608ca572fae service nova] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Received event network-vif-plugged-846c98df-5570-4e8b-8e8b-353bf9825281 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 937.911478] env[69328]: DEBUG oslo_concurrency.lockutils [req-73f5ded2-d97c-496d-b411-c794aa1a1971 req-1f798822-46d3-49a9-b596-5608ca572fae service nova] Acquiring lock "82e27131-b401-4885-83fb-825e5c8e2444-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 937.911745] env[69328]: DEBUG oslo_concurrency.lockutils [req-73f5ded2-d97c-496d-b411-c794aa1a1971 req-1f798822-46d3-49a9-b596-5608ca572fae service nova] Lock "82e27131-b401-4885-83fb-825e5c8e2444-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 937.911914] env[69328]: DEBUG oslo_concurrency.lockutils [req-73f5ded2-d97c-496d-b411-c794aa1a1971 req-1f798822-46d3-49a9-b596-5608ca572fae service nova] Lock "82e27131-b401-4885-83fb-825e5c8e2444-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 937.912277] env[69328]: DEBUG nova.compute.manager [req-73f5ded2-d97c-496d-b411-c794aa1a1971 req-1f798822-46d3-49a9-b596-5608ca572fae service nova] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] No waiting events found dispatching network-vif-plugged-846c98df-5570-4e8b-8e8b-353bf9825281 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 937.912789] env[69328]: WARNING nova.compute.manager [req-73f5ded2-d97c-496d-b411-c794aa1a1971 req-1f798822-46d3-49a9-b596-5608ca572fae service nova] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Received unexpected event network-vif-plugged-846c98df-5570-4e8b-8e8b-353bf9825281 for instance with vm_state building and task_state spawning. 
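The pop_instance_event entries above take a per-instance "<uuid>-events" lock through oslo.concurrency before deciding whether anything is waiting on the network-vif-plugged event. A minimal sketch of that named-lock pattern with lockutils is below; the lock-name format is copied from the log, but the decorated body is illustrative, not Nova's actual event bookkeeping.

    # Sketch of the named-lock pattern behind the 'Acquiring lock "...-events"' /
    # 'Lock ... acquired' / 'Lock ... "released"' lines above.
    from oslo_concurrency import lockutils

    instance_uuid = '82e27131-b401-4885-83fb-825e5c8e2444'  # taken from the log

    @lockutils.synchronized('%s-events' % instance_uuid)
    def _pop_event():
        # Runs with the per-instance event lock held; the lockutils wrapper
        # emits the acquire/release debug lines with wait/held times.
        pass

    _pop_event()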
[ 937.937866] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8a40b511-f5d5-4bc4-8821-1b475ae478da tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lock "76210566-12d7-4f6a-afa1-6329e87e0f85" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.665s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 938.028251] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d914c1e8-178f-4840-9ee3-638bd82af81e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.036756] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-557e83cb-689d-4d47-be3f-2ea1fac58f51 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.072520] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efb817f6-8763-45b2-a66f-30d8b6d08cc5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.083232] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a255fc2e-82bb-4736-864b-c95531b66bb7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.098057] env[69328]: DEBUG nova.compute.provider_tree [None req-0790747f-001b-4009-889e-af44a3ab0781 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 938.173386] env[69328]: DEBUG oslo_vmware.api [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273600, 'name': Rename_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.193051] env[69328]: INFO nova.compute.manager [-] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Took 1.44 seconds to deallocate network for instance. [ 938.214482] env[69328]: INFO nova.compute.manager [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Took 25.30 seconds to build instance. 
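Build durations are reported in a fixed format ("[instance: <uuid>] Took <seconds> seconds to build instance."), which makes them easy to pull out of a capture like this one. A self-contained parsing sketch; the file name nova-compute.log is a placeholder for wherever this output is stored.

    # Collect per-instance build times from lines of the form
    #   "[instance: <uuid>] Took <secs> seconds to build instance."
    import re

    PATTERN = re.compile(
        r'\[instance: (?P<uuid>[0-9a-f-]+)\] '
        r'Took (?P<secs>[\d.]+) seconds to build instance')

    def build_times(path='nova-compute.log'):   # placeholder file name
        times = {}
        with open(path) as fh:
            for line in fh:
                m = PATTERN.search(line)
                if m:
                    times[m.group('uuid')] = float(m.group('secs'))
        return times

    if __name__ == '__main__':
        # Slowest builds first, e.g. 76210566-... 32.16s, 0a485411-... 26.54s.
        for uuid, secs in sorted(build_times().items(), key=lambda kv: -kv[1]):
            print(f'{uuid}  {secs:.2f}s')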
[ 938.293311] env[69328]: DEBUG nova.network.neutron [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Successfully updated port: 846c98df-5570-4e8b-8e8b-353bf9825281 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 938.319664] env[69328]: DEBUG nova.compute.manager [req-0062159d-6758-4047-8e63-19f017e13983 req-18294666-daba-4748-8555-ef038a530238 service nova] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Received event network-changed-846c98df-5570-4e8b-8e8b-353bf9825281 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 938.319664] env[69328]: DEBUG nova.compute.manager [req-0062159d-6758-4047-8e63-19f017e13983 req-18294666-daba-4748-8555-ef038a530238 service nova] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Refreshing instance network info cache due to event network-changed-846c98df-5570-4e8b-8e8b-353bf9825281. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 938.319664] env[69328]: DEBUG oslo_concurrency.lockutils [req-0062159d-6758-4047-8e63-19f017e13983 req-18294666-daba-4748-8555-ef038a530238 service nova] Acquiring lock "refresh_cache-82e27131-b401-4885-83fb-825e5c8e2444" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.319664] env[69328]: DEBUG oslo_concurrency.lockutils [req-0062159d-6758-4047-8e63-19f017e13983 req-18294666-daba-4748-8555-ef038a530238 service nova] Acquired lock "refresh_cache-82e27131-b401-4885-83fb-825e5c8e2444" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 938.319848] env[69328]: DEBUG nova.network.neutron [req-0062159d-6758-4047-8e63-19f017e13983 req-18294666-daba-4748-8555-ef038a530238 service nova] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Refreshing network info cache for port 846c98df-5570-4e8b-8e8b-353bf9825281 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 938.604070] env[69328]: DEBUG nova.scheduler.client.report [None req-0790747f-001b-4009-889e-af44a3ab0781 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 938.678593] env[69328]: DEBUG oslo_vmware.api [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273600, 'name': Rename_Task, 'duration_secs': 0.882628} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.678876] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 938.679142] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ea1825ad-5090-4f8f-9c1c-2a6c27f380c7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.688373] env[69328]: DEBUG oslo_vmware.api [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 938.688373] env[69328]: value = "task-3273601" [ 938.688373] env[69328]: _type = "Task" [ 938.688373] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.701024] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cc57710c-7228-448c-9e4f-bc5e76ac2170 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 938.701024] env[69328]: DEBUG oslo_vmware.api [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273601, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.717019] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3205bb32-01ac-43d7-95b9-e680f5bcb705 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "b21ff3c9-d53a-4065-a271-682c2f1b895d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.811s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 938.797344] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Acquiring lock "refresh_cache-82e27131-b401-4885-83fb-825e5c8e2444" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.857182] env[69328]: DEBUG nova.network.neutron [req-0062159d-6758-4047-8e63-19f017e13983 req-18294666-daba-4748-8555-ef038a530238 service nova] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 939.014744] env[69328]: DEBUG nova.network.neutron [req-0062159d-6758-4047-8e63-19f017e13983 req-18294666-daba-4748-8555-ef038a530238 service nova] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.089945] env[69328]: DEBUG nova.compute.manager [req-9ef917a4-e960-461d-b9c0-e0dbd567d1ad req-1dd80c07-2312-4a39-982c-52bef1cb88e4 service nova] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Received event network-vif-deleted-958e374d-7706-40d9-aac0-ee00bd3140f0 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 939.090122] env[69328]: DEBUG nova.compute.manager [req-9ef917a4-e960-461d-b9c0-e0dbd567d1ad req-1dd80c07-2312-4a39-982c-52bef1cb88e4 service nova] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Received event network-changed-a3cab44b-0572-4007-bab9-e84ba084f70a {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 939.090604] env[69328]: DEBUG nova.compute.manager [req-9ef917a4-e960-461d-b9c0-e0dbd567d1ad req-1dd80c07-2312-4a39-982c-52bef1cb88e4 service nova] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Refreshing instance network info cache due to event network-changed-a3cab44b-0572-4007-bab9-e84ba084f70a. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 939.090685] env[69328]: DEBUG oslo_concurrency.lockutils [req-9ef917a4-e960-461d-b9c0-e0dbd567d1ad req-1dd80c07-2312-4a39-982c-52bef1cb88e4 service nova] Acquiring lock "refresh_cache-76210566-12d7-4f6a-afa1-6329e87e0f85" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.090839] env[69328]: DEBUG oslo_concurrency.lockutils [req-9ef917a4-e960-461d-b9c0-e0dbd567d1ad req-1dd80c07-2312-4a39-982c-52bef1cb88e4 service nova] Acquired lock "refresh_cache-76210566-12d7-4f6a-afa1-6329e87e0f85" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 939.091036] env[69328]: DEBUG nova.network.neutron [req-9ef917a4-e960-461d-b9c0-e0dbd567d1ad req-1dd80c07-2312-4a39-982c-52bef1cb88e4 service nova] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Refreshing network info cache for port a3cab44b-0572-4007-bab9-e84ba084f70a {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 939.107885] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0790747f-001b-4009-889e-af44a3ab0781 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.919s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 939.111769] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b6e80060-f62c-47bc-822b-0bdeae508a3d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 13.609s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 939.138902] env[69328]: INFO nova.scheduler.client.report [None req-0790747f-001b-4009-889e-af44a3ab0781 tempest-ServersTestJSON-858906230 
tempest-ServersTestJSON-858906230-project-member] Deleted allocations for instance 20f750d7-1914-49bb-802f-464a30ffcf3a [ 939.171970] env[69328]: INFO nova.compute.manager [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Rescuing [ 939.173408] env[69328]: DEBUG oslo_concurrency.lockutils [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquiring lock "refresh_cache-c751ef77-c3be-46cd-b7eb-fe139bf0998b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.173408] env[69328]: DEBUG oslo_concurrency.lockutils [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquired lock "refresh_cache-c751ef77-c3be-46cd-b7eb-fe139bf0998b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 939.173408] env[69328]: DEBUG nova.network.neutron [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 939.203044] env[69328]: DEBUG oslo_vmware.api [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273601, 'name': PowerOnVM_Task} progress is 96%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.519271] env[69328]: DEBUG oslo_concurrency.lockutils [req-0062159d-6758-4047-8e63-19f017e13983 req-18294666-daba-4748-8555-ef038a530238 service nova] Releasing lock "refresh_cache-82e27131-b401-4885-83fb-825e5c8e2444" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 939.519635] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Acquired lock "refresh_cache-82e27131-b401-4885-83fb-825e5c8e2444" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 939.520614] env[69328]: DEBUG nova.network.neutron [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 939.652852] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0790747f-001b-4009-889e-af44a3ab0781 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "20f750d7-1914-49bb-802f-464a30ffcf3a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.196s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 939.704025] env[69328]: DEBUG oslo_vmware.api [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273601, 'name': PowerOnVM_Task, 'duration_secs': 0.689693} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.704025] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 939.704025] env[69328]: INFO nova.compute.manager [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Took 6.97 seconds to spawn the instance on the hypervisor. 
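Every entry carries the request ID of the operation that produced it (req-6d5f2762-ed03-447b-999c-e30f6a430d67 for the spawn that just finished above), so a single instance's timeline (ReconfigVM_Task, Rename_Task, PowerOnVM_Task, "Took 6.97 seconds to spawn") can be reconstructed by filtering on that ID. A small sketch; the log file name is again a placeholder.

    # Print every line that belongs to one request ID, reconstructing the
    # timeline of a single operation. The default request ID comes from the
    # log above; the file name is a placeholder.
    import sys

    def trace_request(req_id, path='nova-compute.log'):
        with open(path) as fh:
            for line in fh:
                if req_id in line:
                    yield line.rstrip()

    if __name__ == '__main__':
        req = sys.argv[1] if len(sys.argv) > 1 else \
            'req-6d5f2762-ed03-447b-999c-e30f6a430d67'
        for entry in trace_request(req):
            print(entry)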
[ 939.704025] env[69328]: DEBUG nova.compute.manager [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 939.704590] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-275bae1d-712d-46c0-98d7-d97494ef1f1c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.889212] env[69328]: DEBUG nova.network.neutron [req-9ef917a4-e960-461d-b9c0-e0dbd567d1ad req-1dd80c07-2312-4a39-982c-52bef1cb88e4 service nova] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Updated VIF entry in instance network info cache for port a3cab44b-0572-4007-bab9-e84ba084f70a. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 939.889629] env[69328]: DEBUG nova.network.neutron [req-9ef917a4-e960-461d-b9c0-e0dbd567d1ad req-1dd80c07-2312-4a39-982c-52bef1cb88e4 service nova] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Updating instance_info_cache with network_info: [{"id": "a3cab44b-0572-4007-bab9-e84ba084f70a", "address": "fa:16:3e:02:7d:25", "network": {"id": "3be6b159-5d5c-4752-b79d-0ed6110569d0", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-915151552-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.128", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f50ac50ef6ae4abc83a8064746de7029", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3cab44b-05", "ovs_interfaceid": "a3cab44b-0572-4007-bab9-e84ba084f70a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.907980] env[69328]: DEBUG nova.network.neutron [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Updating instance_info_cache with network_info: [{"id": "2a3862dd-bd04-40ed-9d66-1fa2418297ea", "address": "fa:16:3e:ad:2d:2a", "network": {"id": "bd41ac10-1850-45a2-8e46-6ebdca3d8e13", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-766393176-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efd0e2d2f9ba4416bd8fd08dad912465", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a3862dd-bd", "ovs_interfaceid": "2a3862dd-bd04-40ed-9d66-1fa2418297ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.959475] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-742d534c-f130-413d-abd3-f4c23c3452fe {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.967992] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e33eb0f6-d98f-4ab5-b5ce-13b7f391e7c7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.003405] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbc81947-dff0-4436-a7aa-1d6f3a29bcb6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.011626] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fe95e34-159c-4e32-902c-8b13d19d28c1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.030493] env[69328]: DEBUG nova.compute.provider_tree [None req-b6e80060-f62c-47bc-822b-0bdeae508a3d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 940.089248] env[69328]: DEBUG nova.network.neutron [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 940.229279] env[69328]: INFO nova.compute.manager [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Took 26.54 seconds to build instance. 
[ 940.329430] env[69328]: DEBUG nova.network.neutron [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Updating instance_info_cache with network_info: [{"id": "846c98df-5570-4e8b-8e8b-353bf9825281", "address": "fa:16:3e:e1:1b:2e", "network": {"id": "cc75e08f-f0f3-4b52-9b40-0de73f044554", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1326858830-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1393040bf5304571ae4b66d0a4ee7b6e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap846c98df-55", "ovs_interfaceid": "846c98df-5570-4e8b-8e8b-353bf9825281", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.396596] env[69328]: DEBUG oslo_concurrency.lockutils [req-9ef917a4-e960-461d-b9c0-e0dbd567d1ad req-1dd80c07-2312-4a39-982c-52bef1cb88e4 service nova] Releasing lock "refresh_cache-76210566-12d7-4f6a-afa1-6329e87e0f85" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 940.411351] env[69328]: DEBUG oslo_concurrency.lockutils [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Releasing lock "refresh_cache-c751ef77-c3be-46cd-b7eb-fe139bf0998b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 940.533381] env[69328]: DEBUG nova.compute.manager [None req-f94e39fa-c995-4316-94a4-8df2c241aba1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 940.534265] env[69328]: DEBUG nova.scheduler.client.report [None req-b6e80060-f62c-47bc-822b-0bdeae508a3d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 940.539357] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-333a97ea-d13c-431c-ad60-73904de78b7f 
{{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.731679] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6d5f2762-ed03-447b-999c-e30f6a430d67 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "0a485411-3206-4674-90e4-58df4a8b755a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.054s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 940.834986] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Releasing lock "refresh_cache-82e27131-b401-4885-83fb-825e5c8e2444" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 940.834986] env[69328]: DEBUG nova.compute.manager [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Instance network_info: |[{"id": "846c98df-5570-4e8b-8e8b-353bf9825281", "address": "fa:16:3e:e1:1b:2e", "network": {"id": "cc75e08f-f0f3-4b52-9b40-0de73f044554", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1326858830-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1393040bf5304571ae4b66d0a4ee7b6e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap846c98df-55", "ovs_interfaceid": "846c98df-5570-4e8b-8e8b-353bf9825281", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 940.834986] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e1:1b:2e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '10b81051-1eb1-406b-888c-4548c470c77e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '846c98df-5570-4e8b-8e8b-353bf9825281', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 940.849384] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 940.849934] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 940.850194] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-657b3970-f8a4-45cb-89bb-cd9114cd37cf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.878323] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 940.878323] env[69328]: value = "task-3273602" [ 940.878323] env[69328]: _type = "Task" [ 940.878323] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.886881] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273602, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.982603] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "9ad2b2e3-460a-403e-bfc7-f46648c93849" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 940.982891] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "9ad2b2e3-460a-403e-bfc7-f46648c93849" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 941.052776] env[69328]: INFO nova.compute.manager [None req-f94e39fa-c995-4316-94a4-8df2c241aba1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] instance snapshotting [ 941.055581] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22e65d3a-9777-4d6d-809a-5436f903ad98 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.077956] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14854ab5-a721-421f-a21f-580f0a121977 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.296548] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9cc24f8b-1cee-4304-95fa-957936787710 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "0a485411-3206-4674-90e4-58df4a8b755a" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 941.296771] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9cc24f8b-1cee-4304-95fa-957936787710 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "0a485411-3206-4674-90e4-58df4a8b755a" acquired by 
"nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 941.296960] env[69328]: DEBUG nova.compute.manager [None req-9cc24f8b-1cee-4304-95fa-957936787710 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 941.298236] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b155c2e8-8245-4ba8-a5e0-f7dffcc3f57a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.306050] env[69328]: DEBUG nova.compute.manager [None req-9cc24f8b-1cee-4304-95fa-957936787710 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69328) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 941.306623] env[69328]: DEBUG nova.objects.instance [None req-9cc24f8b-1cee-4304-95fa-957936787710 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lazy-loading 'flavor' on Instance uuid 0a485411-3206-4674-90e4-58df4a8b755a {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 941.394198] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273602, 'name': CreateVM_Task} progress is 25%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.485776] env[69328]: DEBUG nova.compute.manager [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Starting instance... 
{{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 941.551017] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b6e80060-f62c-47bc-822b-0bdeae508a3d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.438s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 941.559110] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7b54d2e5-71f6-4aae-aebe-2a4213df214c tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.613s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 941.559110] env[69328]: DEBUG nova.objects.instance [None req-7b54d2e5-71f6-4aae-aebe-2a4213df214c tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Lazy-loading 'resources' on Instance uuid 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 941.591407] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f94e39fa-c995-4316-94a4-8df2c241aba1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Creating Snapshot of the VM instance {{(pid=69328) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 941.592593] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-95a213ed-1cc9-472d-a2de-60de6b4326a3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.603815] env[69328]: DEBUG oslo_vmware.api [None req-f94e39fa-c995-4316-94a4-8df2c241aba1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 941.603815] env[69328]: value = "task-3273603" [ 941.603815] env[69328]: _type = "Task" [ 941.603815] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.612694] env[69328]: DEBUG oslo_vmware.api [None req-f94e39fa-c995-4316-94a4-8df2c241aba1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273603, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.893372] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273602, 'name': CreateVM_Task, 'duration_secs': 0.694761} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.893372] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 941.893372] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.893372] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 941.893372] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 941.894392] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd00e417-38d9-47bb-8792-836fc185b745 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.901496] env[69328]: DEBUG oslo_vmware.api [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for the task: (returnval){ [ 941.901496] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]526fa75b-17c9-aae8-278e-0f41e730f960" [ 941.901496] env[69328]: _type = "Task" [ 941.901496] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.914759] env[69328]: DEBUG oslo_vmware.api [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]526fa75b-17c9-aae8-278e-0f41e730f960, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.953222] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 941.953639] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ac3a285c-587f-4b3d-98d5-3cd38c9a483d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.963078] env[69328]: DEBUG oslo_vmware.api [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 941.963078] env[69328]: value = "task-3273604" [ 941.963078] env[69328]: _type = "Task" [ 941.963078] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.973710] env[69328]: DEBUG oslo_vmware.api [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273604, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.014290] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 942.118554] env[69328]: DEBUG oslo_vmware.api [None req-f94e39fa-c995-4316-94a4-8df2c241aba1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273603, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.125300] env[69328]: INFO nova.scheduler.client.report [None req-b6e80060-f62c-47bc-822b-0bdeae508a3d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Deleted allocation for migration e58a0bcf-9f68-4aec-b3e1-10cfbcb5ca51 [ 942.317730] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cc24f8b-1cee-4304-95fa-957936787710 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 942.317886] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fa91c37e-fea1-40cf-be42-4287d35a6d53 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.331621] env[69328]: DEBUG oslo_vmware.api [None req-9cc24f8b-1cee-4304-95fa-957936787710 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 942.331621] env[69328]: value = "task-3273605" [ 942.331621] env[69328]: _type = "Task" [ 942.331621] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.347560] env[69328]: DEBUG oslo_vmware.api [None req-9cc24f8b-1cee-4304-95fa-957936787710 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273605, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.414024] env[69328]: DEBUG oslo_vmware.api [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]526fa75b-17c9-aae8-278e-0f41e730f960, 'name': SearchDatastore_Task, 'duration_secs': 0.011818} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.414596] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 942.415304] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 942.415304] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.415304] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 942.415528] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 942.418374] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a8e042ce-304a-4b44-9a48-1ebb83f9d9a1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.429343] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 942.429343] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 942.430976] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c1e91af-e605-4d96-a019-64c6ff2a49fc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.434453] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8648dd4-530d-4b47-b131-4f73004ed999 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.441726] env[69328]: DEBUG oslo_vmware.api [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for the task: (returnval){ [ 942.441726] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]528670dd-6fe7-65d6-cfc1-a1b54a8285ad" [ 942.441726] env[69328]: _type = "Task" [ 942.441726] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.447705] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae3c0c5d-7fb4-47ad-ade2-86b0e8de8f75 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.457012] env[69328]: DEBUG oslo_vmware.api [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]528670dd-6fe7-65d6-cfc1-a1b54a8285ad, 'name': SearchDatastore_Task, 'duration_secs': 0.009858} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.487503] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13323ba8-257c-4e9e-a500-bc794c29c020 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.492772] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5c221bc-2524-45ab-9694-287269cc6971 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.501354] env[69328]: DEBUG oslo_vmware.api [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273604, 'name': PowerOffVM_Task, 'duration_secs': 0.335459} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.506432] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 942.506432] env[69328]: DEBUG oslo_vmware.api [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for the task: (returnval){ [ 942.506432] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a307de-0cfe-4e16-fa35-c8738ce7518e" [ 942.506432] env[69328]: _type = "Task" [ 942.506432] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.506432] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d03ee761-0bd4-42a5-9fe5-85d920abb198 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.510221] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85fbcec1-2a92-4e54-a1d5-fa852e62314e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.540346] env[69328]: DEBUG oslo_vmware.api [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a307de-0cfe-4e16-fa35-c8738ce7518e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.550342] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d48f065-33fe-421c-8a88-277ad918b281 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.554283] env[69328]: DEBUG nova.compute.provider_tree [None req-7b54d2e5-71f6-4aae-aebe-2a4213df214c tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 942.595190] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 942.598691] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aa780cf9-5768-460d-b3f9-22bee70b9947 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.607137] env[69328]: DEBUG oslo_vmware.api [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 942.607137] env[69328]: value = "task-3273606" [ 942.607137] env[69328]: _type = "Task" [ 942.607137] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.623911] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] VM already powered off {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 942.624176] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 942.624438] env[69328]: DEBUG oslo_concurrency.lockutils [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.624590] env[69328]: DEBUG oslo_concurrency.lockutils [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 942.624769] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 942.625053] env[69328]: DEBUG oslo_vmware.api [None req-f94e39fa-c995-4316-94a4-8df2c241aba1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273603, 'name': CreateSnapshot_Task, 'duration_secs': 1.016981} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.625262] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f12166eb-0504-4953-bab5-cf23dcae8e82 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.627150] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f94e39fa-c995-4316-94a4-8df2c241aba1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Created Snapshot of the VM instance {{(pid=69328) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 942.628245] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-479c6f58-b7d7-4253-8497-00dd6ddf5bce {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.638101] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b6e80060-f62c-47bc-822b-0bdeae508a3d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "fd72bae3-cb72-48d0-a0df-9ea3a770a86c" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 20.490s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 942.642811] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 942.642811] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 942.643377] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a85b2e63-855c-49ab-ae7d-fa79a39e450a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.650328] env[69328]: DEBUG oslo_vmware.api [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 942.650328] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]525ba51c-2625-9f23-b4ce-877f075db496" [ 942.650328] env[69328]: _type = "Task" [ 942.650328] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.661947] env[69328]: DEBUG oslo_vmware.api [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]525ba51c-2625-9f23-b4ce-877f075db496, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.841628] env[69328]: DEBUG oslo_vmware.api [None req-9cc24f8b-1cee-4304-95fa-957936787710 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273605, 'name': PowerOffVM_Task, 'duration_secs': 0.207755} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.841973] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cc24f8b-1cee-4304-95fa-957936787710 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 942.842261] env[69328]: DEBUG nova.compute.manager [None req-9cc24f8b-1cee-4304-95fa-957936787710 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 942.843097] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a80fe9b3-4234-4e27-9a8c-dd964ccf9e36 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.023132] env[69328]: DEBUG oslo_vmware.api [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a307de-0cfe-4e16-fa35-c8738ce7518e, 'name': SearchDatastore_Task, 'duration_secs': 0.028331} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.023132] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 943.023495] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 82e27131-b401-4885-83fb-825e5c8e2444/82e27131-b401-4885-83fb-825e5c8e2444.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 943.023600] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-787cf3ff-8391-4adc-bec6-6b63dcbe20fc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.032670] env[69328]: DEBUG oslo_vmware.api [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for the task: (returnval){ [ 943.032670] env[69328]: value = "task-3273607" [ 943.032670] env[69328]: _type = "Task" [ 943.032670] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.044372] env[69328]: DEBUG oslo_vmware.api [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273607, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.061330] env[69328]: DEBUG nova.scheduler.client.report [None req-7b54d2e5-71f6-4aae-aebe-2a4213df214c tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 943.157096] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f94e39fa-c995-4316-94a4-8df2c241aba1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Creating linked-clone VM from snapshot {{(pid=69328) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 943.157995] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-5f2a8183-a6c2-4077-b7ca-e006c1dd9f61 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.176766] env[69328]: DEBUG oslo_vmware.api [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]525ba51c-2625-9f23-b4ce-877f075db496, 'name': SearchDatastore_Task, 'duration_secs': 0.010995} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.179396] env[69328]: DEBUG oslo_vmware.api [None req-f94e39fa-c995-4316-94a4-8df2c241aba1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 943.179396] env[69328]: value = "task-3273608" [ 943.179396] env[69328]: _type = "Task" [ 943.179396] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.179618] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-afce0d86-241f-4658-a049-eaeb2120b817 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.192912] env[69328]: DEBUG oslo_vmware.api [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 943.192912] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52948cfc-e45b-bb0c-7db3-b16581a5a1c4" [ 943.192912] env[69328]: _type = "Task" [ 943.192912] env[69328]: } to complete. 
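The repeated "Waiting for the task ... to complete" / "progress is N%" pairs in this trace are produced by oslo.vmware's task-polling helper. A minimal sketch of that pattern in Python, assuming an already-authenticated VMwareAPISession; the helper name and all argument values below are illustrative, not Nova's actual vm_util code:

def copy_virtual_disk(session, source_path, dest_path, datacenter_ref):
    """Start a CopyVirtualDisk_Task and block until vCenter finishes it."""
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName=source_path, sourceDatacenter=datacenter_ref,
        destName=dest_path, destDatacenter=datacenter_ref)
    # wait_for_task polls at the session's task_poll_interval; each poll
    # emits one "_poll_task ... progress is N%" line like those above.
    return session.wait_for_task(task)

# Usage sketch (placeholder endpoint and credentials):
# from oslo_vmware import api as vmware_api
# session = vmware_api.VMwareAPISession(
#     'vc.example.test', 'user', 'secret',
#     api_retry_count=10, task_poll_interval=0.5)
# copy_virtual_disk(session,
#                   '[datastore2] devstack-image-cache_base/image.vmdk',
#                   '[datastore2] instance-uuid/instance-uuid.vmdk', dc_ref)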
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.198063] env[69328]: DEBUG oslo_vmware.api [None req-f94e39fa-c995-4316-94a4-8df2c241aba1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273608, 'name': CloneVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.205816] env[69328]: DEBUG oslo_vmware.api [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52948cfc-e45b-bb0c-7db3-b16581a5a1c4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.358883] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9cc24f8b-1cee-4304-95fa-957936787710 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "0a485411-3206-4674-90e4-58df4a8b755a" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.062s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 943.554027] env[69328]: DEBUG oslo_vmware.api [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273607, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.565267] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7b54d2e5-71f6-4aae-aebe-2a4213df214c tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.007s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 943.569229] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cc57710c-7228-448c-9e4f-bc5e76ac2170 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.869s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 943.569229] env[69328]: DEBUG nova.objects.instance [None req-cc57710c-7228-448c-9e4f-bc5e76ac2170 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Lazy-loading 'resources' on Instance uuid 1f568ba1-8591-499b-b1ee-da16e26f81fc {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 943.607258] env[69328]: INFO nova.scheduler.client.report [None req-7b54d2e5-71f6-4aae-aebe-2a4213df214c tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Deleted allocations for instance 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494 [ 943.706746] env[69328]: DEBUG oslo_vmware.api [None req-f94e39fa-c995-4316-94a4-8df2c241aba1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273608, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.714655] env[69328]: DEBUG oslo_vmware.api [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52948cfc-e45b-bb0c-7db3-b16581a5a1c4, 'name': SearchDatastore_Task, 'duration_secs': 0.017376} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.714655] env[69328]: DEBUG oslo_concurrency.lockutils [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 943.714655] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] c751ef77-c3be-46cd-b7eb-fe139bf0998b/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318-rescue.vmdk. {{(pid=69328) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 943.714834] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c7747c08-1360-4969-bde0-59fa0a208537 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.723639] env[69328]: DEBUG oslo_vmware.api [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 943.723639] env[69328]: value = "task-3273609" [ 943.723639] env[69328]: _type = "Task" [ 943.723639] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.735071] env[69328]: DEBUG oslo_vmware.api [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273609, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.817136] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8de76a72-155b-4048-ab4a-181ef8187ce3 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "0a485411-3206-4674-90e4-58df4a8b755a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 943.817466] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8de76a72-155b-4048-ab4a-181ef8187ce3 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "0a485411-3206-4674-90e4-58df4a8b755a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 943.817697] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8de76a72-155b-4048-ab4a-181ef8187ce3 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "0a485411-3206-4674-90e4-58df4a8b755a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 943.817953] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8de76a72-155b-4048-ab4a-181ef8187ce3 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "0a485411-3206-4674-90e4-58df4a8b755a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 943.818193] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8de76a72-155b-4048-ab4a-181ef8187ce3 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "0a485411-3206-4674-90e4-58df4a8b755a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 943.820500] env[69328]: INFO nova.compute.manager [None req-8de76a72-155b-4048-ab4a-181ef8187ce3 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Terminating instance [ 944.051023] env[69328]: DEBUG oslo_vmware.api [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273607, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.568537} completed successfully. 
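The "Acquiring lock ... by ..." / "acquired ... waited Ns" / ""released" ... held Ns" triplets come from oslo.concurrency's lockutils, which Nova wraps around each critical section. A small illustration of the two usual forms; the function names and bodies are placeholders rather than Nova's real code:

from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def update_usage():
    # Decorator form: the whole function runs under the named lock, the same
    # machinery behind the "compute_resources" acquire/release lines above.
    pass


def terminate_instance(instance_uuid):
    # Context-manager form: lockutils logs how long the caller waited for the
    # lock and how long it was held, matching the timings in this trace.
    with lockutils.lock(instance_uuid):
        pass  # teardown work would happen here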
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.051023] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 82e27131-b401-4885-83fb-825e5c8e2444/82e27131-b401-4885-83fb-825e5c8e2444.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 944.051023] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 944.051023] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1d15b7ae-6e6b-407e-b539-a6503ef17456 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.058818] env[69328]: DEBUG oslo_vmware.api [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for the task: (returnval){ [ 944.058818] env[69328]: value = "task-3273610" [ 944.058818] env[69328]: _type = "Task" [ 944.058818] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.069579] env[69328]: DEBUG oslo_vmware.api [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273610, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.116502] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7b54d2e5-71f6-4aae-aebe-2a4213df214c tempest-ImagesOneServerNegativeTestJSON-1078222676 tempest-ImagesOneServerNegativeTestJSON-1078222676-project-member] Lock "73d5b248-3c3e-4e38-8d9c-1f9bfdb38494" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.670s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 944.195471] env[69328]: DEBUG oslo_vmware.api [None req-f94e39fa-c995-4316-94a4-8df2c241aba1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273608, 'name': CloneVM_Task} progress is 94%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.245490] env[69328]: DEBUG oslo_vmware.api [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273609, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.328356] env[69328]: DEBUG nova.compute.manager [None req-8de76a72-155b-4048-ab4a-181ef8187ce3 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 944.329510] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8de76a72-155b-4048-ab4a-181ef8187ce3 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 944.330929] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60ae7561-b773-4075-b2d1-40ced5e34783 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.345025] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8de76a72-155b-4048-ab4a-181ef8187ce3 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 944.345025] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fcbe62e3-254a-4513-8c39-f5f7c40f63ec {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.491321] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3078b6fc-6f18-4de4-9117-e4c7bea5a548 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.501438] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d08174b5-ca5b-47a7-88d0-39ea7bee60a7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.538078] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9c7c8df-64ca-438f-8b9c-ab07dbd013d0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.550515] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59963fe1-9ad3-49a8-b1b4-5bb94536c3f3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.576427] env[69328]: DEBUG nova.compute.provider_tree [None req-cc57710c-7228-448c-9e4f-bc5e76ac2170 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 944.588308] env[69328]: DEBUG oslo_vmware.api [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273610, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071574} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.588308] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 944.588308] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a11b7e0-b169-4bf8-a2ab-165a3b2c251e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.617739] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Reconfiguring VM instance instance-0000004f to attach disk [datastore2] 82e27131-b401-4885-83fb-825e5c8e2444/82e27131-b401-4885-83fb-825e5c8e2444.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 944.617948] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f548bc5c-4a38-46cd-af16-b7620f33074a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.640952] env[69328]: DEBUG oslo_vmware.api [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for the task: (returnval){ [ 944.640952] env[69328]: value = "task-3273612" [ 944.640952] env[69328]: _type = "Task" [ 944.640952] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.650363] env[69328]: DEBUG oslo_vmware.api [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273612, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.696098] env[69328]: DEBUG oslo_vmware.api [None req-f94e39fa-c995-4316-94a4-8df2c241aba1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273608, 'name': CloneVM_Task} progress is 94%. 
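The spawn tail for instance 82e27131-... visible here (copy the cached image, extend the root disk, reconfigure the VM to attach it, rename it, power it on) is a chain of vCenter tasks driven the same way. A condensed sketch of two of those steps through an oslo.vmware session; the reconfigure and rename steps are omitted, and the helper name and argument values are assumed rather than taken from this deployment:

def extend_and_power_on(session, vm_ref, disk_path, new_capacity_kb,
                        datacenter_ref):
    """Illustrative helper: grow the root VMDK, then power the VM on."""
    disk_mgr = session.vim.service_content.virtualDiskManager
    extend = session.invoke_api(
        session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
        name=disk_path, datacenter=datacenter_ref,
        newCapacityKb=new_capacity_kb, eagerZero=False)
    session.wait_for_task(extend)      # the ExtendVirtualDisk_Task above
    power_on = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(power_on)    # the PowerOnVM_Task further down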
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.708920] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d76b6475-e53b-41ad-a9cf-58f1a146b704 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "fd72bae3-cb72-48d0-a0df-9ea3a770a86c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 944.708920] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d76b6475-e53b-41ad-a9cf-58f1a146b704 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "fd72bae3-cb72-48d0-a0df-9ea3a770a86c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 944.708920] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d76b6475-e53b-41ad-a9cf-58f1a146b704 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "fd72bae3-cb72-48d0-a0df-9ea3a770a86c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 944.708920] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d76b6475-e53b-41ad-a9cf-58f1a146b704 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "fd72bae3-cb72-48d0-a0df-9ea3a770a86c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 944.708920] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d76b6475-e53b-41ad-a9cf-58f1a146b704 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "fd72bae3-cb72-48d0-a0df-9ea3a770a86c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 944.710233] env[69328]: INFO nova.compute.manager [None req-d76b6475-e53b-41ad-a9cf-58f1a146b704 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Terminating instance [ 944.740807] env[69328]: DEBUG oslo_vmware.api [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273609, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.839367} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.741958] env[69328]: INFO nova.virt.vmwareapi.ds_util [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] c751ef77-c3be-46cd-b7eb-fe139bf0998b/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318-rescue.vmdk. [ 944.743298] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e4f6ff9-2855-4231-84cf-ec29104a207c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.775733] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] c751ef77-c3be-46cd-b7eb-fe139bf0998b/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318-rescue.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 944.776354] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-76a60c66-daf2-44ba-8e9b-a0b84bfd60f4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.805712] env[69328]: DEBUG oslo_vmware.api [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 944.805712] env[69328]: value = "task-3273613" [ 944.805712] env[69328]: _type = "Task" [ 944.805712] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.816015] env[69328]: DEBUG oslo_vmware.api [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273613, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.079975] env[69328]: DEBUG nova.scheduler.client.report [None req-cc57710c-7228-448c-9e4f-bc5e76ac2170 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 945.153836] env[69328]: DEBUG oslo_vmware.api [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273612, 'name': ReconfigVM_Task, 'duration_secs': 0.351434} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.154646] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Reconfigured VM instance instance-0000004f to attach disk [datastore2] 82e27131-b401-4885-83fb-825e5c8e2444/82e27131-b401-4885-83fb-825e5c8e2444.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 945.155163] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-499662f4-0400-4ab8-a59d-bf959c2ae4ff {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.164790] env[69328]: DEBUG oslo_vmware.api [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for the task: (returnval){ [ 945.164790] env[69328]: value = "task-3273614" [ 945.164790] env[69328]: _type = "Task" [ 945.164790] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.174200] env[69328]: DEBUG oslo_vmware.api [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273614, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.196305] env[69328]: DEBUG oslo_vmware.api [None req-f94e39fa-c995-4316-94a4-8df2c241aba1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273608, 'name': CloneVM_Task, 'duration_secs': 1.969823} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.196606] env[69328]: INFO nova.virt.vmwareapi.vmops [None req-f94e39fa-c995-4316-94a4-8df2c241aba1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Created linked-clone VM from snapshot [ 945.197495] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42154306-c006-4c68-be93-5aea32fe61d0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.208652] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-f94e39fa-c995-4316-94a4-8df2c241aba1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Uploading image 44b9c7fb-3d5c-4708-a4ae-a8a2aba3a7ca {{(pid=69328) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 945.215339] env[69328]: DEBUG nova.compute.manager [None req-d76b6475-e53b-41ad-a9cf-58f1a146b704 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 945.215339] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d76b6475-e53b-41ad-a9cf-58f1a146b704 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 945.216166] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7abb713a-170b-436f-ba15-0869447580e6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.227976] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d76b6475-e53b-41ad-a9cf-58f1a146b704 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 945.227976] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fb1a4652-37ac-4ebb-a130-8845ede832c3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.229968] env[69328]: DEBUG oslo_vmware.rw_handles [None req-f94e39fa-c995-4316-94a4-8df2c241aba1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 945.229968] env[69328]: value = "vm-653869" [ 945.229968] env[69328]: _type = "VirtualMachine" [ 945.229968] env[69328]: }. {{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 945.230537] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-6e6db755-97bd-4ef1-8ada-3e5c8d951344 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.239871] env[69328]: DEBUG oslo_vmware.api [None req-d76b6475-e53b-41ad-a9cf-58f1a146b704 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 945.239871] env[69328]: value = "task-3273615" [ 945.239871] env[69328]: _type = "Task" [ 945.239871] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.241831] env[69328]: DEBUG oslo_vmware.rw_handles [None req-f94e39fa-c995-4316-94a4-8df2c241aba1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lease: (returnval){ [ 945.241831] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5215ceee-4c87-d1fa-6185-899f1a6fff89" [ 945.241831] env[69328]: _type = "HttpNfcLease" [ 945.241831] env[69328]: } obtained for exporting VM: (result){ [ 945.241831] env[69328]: value = "vm-653869" [ 945.241831] env[69328]: _type = "VirtualMachine" [ 945.241831] env[69328]: }. 
{{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 945.242453] env[69328]: DEBUG oslo_vmware.api [None req-f94e39fa-c995-4316-94a4-8df2c241aba1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the lease: (returnval){ [ 945.242453] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5215ceee-4c87-d1fa-6185-899f1a6fff89" [ 945.242453] env[69328]: _type = "HttpNfcLease" [ 945.242453] env[69328]: } to be ready. {{(pid=69328) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 945.258090] env[69328]: DEBUG oslo_vmware.api [None req-d76b6475-e53b-41ad-a9cf-58f1a146b704 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273615, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.258794] env[69328]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 945.258794] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5215ceee-4c87-d1fa-6185-899f1a6fff89" [ 945.258794] env[69328]: _type = "HttpNfcLease" [ 945.258794] env[69328]: } is initializing. {{(pid=69328) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 945.319060] env[69328]: DEBUG oslo_vmware.api [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273613, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.455873] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "96f604a9-e42c-4aa8-b5b5-edcb34901d94" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 945.456400] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "96f604a9-e42c-4aa8-b5b5-edcb34901d94" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 945.460020] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8de76a72-155b-4048-ab4a-181ef8187ce3 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 945.460020] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8de76a72-155b-4048-ab4a-181ef8187ce3 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 945.460020] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-8de76a72-155b-4048-ab4a-181ef8187ce3 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Deleting the datastore file [datastore1] 0a485411-3206-4674-90e4-58df4a8b755a {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 945.460020] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ac96392a-7086-4a6b-ae7b-eff3f960f67e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.469137] env[69328]: DEBUG oslo_vmware.api [None req-8de76a72-155b-4048-ab4a-181ef8187ce3 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 945.469137] env[69328]: value = "task-3273617" [ 945.469137] env[69328]: _type = "Task" [ 945.469137] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.480082] env[69328]: DEBUG oslo_vmware.api [None req-8de76a72-155b-4048-ab4a-181ef8187ce3 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273617, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.585082] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cc57710c-7228-448c-9e4f-bc5e76ac2170 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.017s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 945.588073] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.574s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 945.590260] env[69328]: INFO nova.compute.claims [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 945.618653] env[69328]: INFO nova.scheduler.client.report [None req-cc57710c-7228-448c-9e4f-bc5e76ac2170 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Deleted allocations for instance 1f568ba1-8591-499b-b1ee-da16e26f81fc [ 945.685666] env[69328]: DEBUG oslo_vmware.api [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273614, 'name': Rename_Task, 'duration_secs': 0.377801} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.686372] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 945.686618] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-daca9fc2-462c-4dfb-94f4-9df7587decd1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.701983] env[69328]: DEBUG oslo_vmware.api [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for the task: (returnval){ [ 945.701983] env[69328]: value = "task-3273618" [ 945.701983] env[69328]: _type = "Task" [ 945.701983] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.715108] env[69328]: DEBUG oslo_vmware.api [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273618, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.758698] env[69328]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 945.758698] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5215ceee-4c87-d1fa-6185-899f1a6fff89" [ 945.758698] env[69328]: _type = "HttpNfcLease" [ 945.758698] env[69328]: } is ready. {{(pid=69328) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 945.758937] env[69328]: DEBUG oslo_vmware.api [None req-d76b6475-e53b-41ad-a9cf-58f1a146b704 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273615, 'name': PowerOffVM_Task, 'duration_secs': 0.356466} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.762581] env[69328]: DEBUG oslo_vmware.rw_handles [None req-f94e39fa-c995-4316-94a4-8df2c241aba1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 945.762581] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5215ceee-4c87-d1fa-6185-899f1a6fff89" [ 945.762581] env[69328]: _type = "HttpNfcLease" [ 945.762581] env[69328]: }. 
{{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 945.762581] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d76b6475-e53b-41ad-a9cf-58f1a146b704 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 945.762581] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d76b6475-e53b-41ad-a9cf-58f1a146b704 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 945.762581] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4da8dd11-1f3f-4f8b-b2d2-be085d1c8cca {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.763130] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d722b9df-c26e-4fc4-b7a1-2342ff10c2c6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.771202] env[69328]: DEBUG oslo_vmware.rw_handles [None req-f94e39fa-c995-4316-94a4-8df2c241aba1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525a9ae2-6a0f-391a-8062-e9d8ee94119f/disk-0.vmdk from lease info. {{(pid=69328) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 945.771436] env[69328]: DEBUG oslo_vmware.rw_handles [None req-f94e39fa-c995-4316-94a4-8df2c241aba1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525a9ae2-6a0f-391a-8062-e9d8ee94119f/disk-0.vmdk for reading. 
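The snapshot-upload path above (CreateSnapshot_Task, a linked-clone CloneVM_Task, then ExportVm returning an HttpNfcLease whose disk-0.vmdk URL is opened for reading) reduces to a handful of vSphere calls. A rough sketch of the lease half, assuming an authenticated session and a reference to the exported clone; this is illustrative, not Nova's images.py upload code:

from oslo_vmware import vim_util


def get_vmdk_export_url(session, vm_ref):
    # ExportVm hands back an HttpNfcLease; its device URLs only become valid
    # once the lease leaves the "initializing" state, which is what
    # wait_for_lease_ready polls for in the lines above.
    lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)
    session.wait_for_lease_ready(lease)
    lease_info = session.invoke_api(
        vim_util, 'get_object_property', session.vim, lease, 'info')
    # The first device URL is the disk-0.vmdk link that the read handle
    # then opens over HTTPS for streaming into Glance.
    return lease_info.deviceUrl[0].url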
{{(pid=69328) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 945.845522] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d76b6475-e53b-41ad-a9cf-58f1a146b704 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 945.845648] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d76b6475-e53b-41ad-a9cf-58f1a146b704 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 945.845816] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-d76b6475-e53b-41ad-a9cf-58f1a146b704 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Deleting the datastore file [datastore2] fd72bae3-cb72-48d0-a0df-9ea3a770a86c {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 945.847407] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aab929b1-6c2a-4fdf-9724-252172942ee3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.855517] env[69328]: DEBUG oslo_vmware.api [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273613, 'name': ReconfigVM_Task, 'duration_secs': 0.650319} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.857154] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Reconfigured VM instance instance-00000048 to attach disk [datastore1] c751ef77-c3be-46cd-b7eb-fe139bf0998b/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318-rescue.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 945.857512] env[69328]: DEBUG oslo_vmware.api [None req-d76b6475-e53b-41ad-a9cf-58f1a146b704 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 945.857512] env[69328]: value = "task-3273620" [ 945.857512] env[69328]: _type = "Task" [ 945.857512] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.858297] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f82db1ca-d42e-4683-9db7-d199dd44b402 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.870794] env[69328]: DEBUG oslo_vmware.api [None req-d76b6475-e53b-41ad-a9cf-58f1a146b704 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273620, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.893870] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5ed8149d-b71e-4097-9d29-4bd55cf91720 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.910676] env[69328]: DEBUG oslo_vmware.api [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 945.910676] env[69328]: value = "task-3273621" [ 945.910676] env[69328]: _type = "Task" [ 945.910676] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.920951] env[69328]: DEBUG oslo_vmware.api [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273621, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.963070] env[69328]: DEBUG nova.compute.manager [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 945.982637] env[69328]: DEBUG oslo_vmware.api [None req-8de76a72-155b-4048-ab4a-181ef8187ce3 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273617, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.372543} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.985267] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-8de76a72-155b-4048-ab4a-181ef8187ce3 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 945.985479] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8de76a72-155b-4048-ab4a-181ef8187ce3 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 945.985706] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8de76a72-155b-4048-ab4a-181ef8187ce3 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 945.985822] env[69328]: INFO nova.compute.manager [None req-8de76a72-155b-4048-ab4a-181ef8187ce3 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Took 1.66 seconds to destroy the instance on the hypervisor. 
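The PowerOnVM_Task / DeleteDatastoreFile_Task entries above follow the usual oslo.vmware pattern: the driver invokes a vSphere method that returns a Task managed object, then blocks in wait_for_task(), which is what produces the repeated "_poll_task ... progress is N%" lines until the task completes. A minimal sketch of that pattern, assuming placeholder connection details and a placeholder VM moref (none of these values come from this run):

    # Sketch of the invoke-then-poll pattern visible in the log above.
    # Host, credentials and the moref value are placeholders.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vc.example.test', 'administrator@vsphere.local', 'secret',
        api_retry_count=3, task_poll_interval=0.5)

    # Build a managed object reference for the VM (placeholder moref value).
    vm_ref = vim_util.get_moref('vm-1234', 'VirtualMachine')

    # PowerOnVM_Task returns immediately with a Task moref ...
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # ... and wait_for_task() polls it (the "_poll_task ... progress is N%"
    # entries) until it reports success or raises if the task errors out.
    session.wait_for_task(task)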
[ 945.986514] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8de76a72-155b-4048-ab4a-181ef8187ce3 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 945.986660] env[69328]: DEBUG nova.compute.manager [-] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 945.986720] env[69328]: DEBUG nova.network.neutron [-] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 945.990026] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-92caa744-a83a-441f-8f6c-27344655a632 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.132781] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cc57710c-7228-448c-9e4f-bc5e76ac2170 tempest-ServersTestMultiNic-123064241 tempest-ServersTestMultiNic-123064241-project-member] Lock "1f568ba1-8591-499b-b1ee-da16e26f81fc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.124s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 946.219929] env[69328]: DEBUG oslo_vmware.api [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273618, 'name': PowerOnVM_Task, 'duration_secs': 0.514523} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.219929] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 946.220106] env[69328]: INFO nova.compute.manager [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Took 9.00 seconds to spawn the instance on the hypervisor. 
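The "Waiting for function ..._deallocate_network_with_retries to return" entry above is emitted by oslo.service's loopingcall RetryDecorator, which Nova wraps around network deallocation so transient Neutron failures are retried with increasing sleeps. A rough sketch of that wrapper under assumed parameters; the exception class and function body here are illustrative stand-ins, not Nova's actual code:

    # Illustrative use of oslo.service's RetryDecorator, the helper behind
    # the "Waiting for function ... to return" DEBUG lines above.
    from oslo_service import loopingcall


    class TransientNeutronError(Exception):
        """Placeholder for the exception types that are worth retrying."""


    @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=2,
                                max_sleep_time=12,
                                exceptions=(TransientNeutronError,))
    def deallocate_network_with_retries():
        # In Nova this body performs the Neutron deallocation; here it is
        # just a stand-in that could raise a retryable error.
        pass


    # Calling the decorated function runs it, sleeping and retrying only on
    # the listed exception types, up to max_retry_count attempts.
    deallocate_network_with_retries()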
[ 946.221164] env[69328]: DEBUG nova.compute.manager [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 946.221164] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-485b8e7c-12a6-40de-9d75-3eb34557890d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.377133] env[69328]: DEBUG oslo_vmware.api [None req-d76b6475-e53b-41ad-a9cf-58f1a146b704 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273620, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.177539} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.377534] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-d76b6475-e53b-41ad-a9cf-58f1a146b704 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 946.378339] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d76b6475-e53b-41ad-a9cf-58f1a146b704 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 946.378615] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d76b6475-e53b-41ad-a9cf-58f1a146b704 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 946.378879] env[69328]: INFO nova.compute.manager [None req-d76b6475-e53b-41ad-a9cf-58f1a146b704 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Took 1.16 seconds to destroy the instance on the hypervisor. [ 946.379241] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d76b6475-e53b-41ad-a9cf-58f1a146b704 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 946.380039] env[69328]: DEBUG nova.compute.manager [-] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 946.380039] env[69328]: DEBUG nova.network.neutron [-] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 946.432624] env[69328]: DEBUG oslo_vmware.api [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273621, 'name': ReconfigVM_Task, 'duration_secs': 0.200091} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.432624] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 946.432624] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-da8cadd5-124e-42d5-864b-ab207e099454 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.448079] env[69328]: DEBUG oslo_vmware.api [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 946.448079] env[69328]: value = "task-3273622" [ 946.448079] env[69328]: _type = "Task" [ 946.448079] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.456220] env[69328]: DEBUG oslo_vmware.api [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273622, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.495866] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 946.527648] env[69328]: DEBUG nova.compute.manager [req-46d2cd67-06df-4710-a817-79182ee50ab8 req-a861e658-a5f9-4a51-ad7d-030ccc55c4a6 service nova] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Received event network-vif-deleted-c68f39c4-deae-4739-bc3f-9284775789c4 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 946.527872] env[69328]: INFO nova.compute.manager [req-46d2cd67-06df-4710-a817-79182ee50ab8 req-a861e658-a5f9-4a51-ad7d-030ccc55c4a6 service nova] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Neutron deleted interface c68f39c4-deae-4739-bc3f-9284775789c4; detaching it from the instance and deleting it from the info cache [ 946.528038] env[69328]: DEBUG nova.network.neutron [req-46d2cd67-06df-4710-a817-79182ee50ab8 req-a861e658-a5f9-4a51-ad7d-030ccc55c4a6 service nova] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.750271] env[69328]: INFO nova.compute.manager [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Took 31.88 seconds to build instance. [ 946.846151] env[69328]: DEBUG oslo_concurrency.lockutils [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquiring lock "65e38a02-880b-46e2-8866-645a9fc17c7a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 946.847544] env[69328]: DEBUG oslo_concurrency.lockutils [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "65e38a02-880b-46e2-8866-645a9fc17c7a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 946.961596] env[69328]: DEBUG oslo_vmware.api [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273622, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.993902] env[69328]: DEBUG nova.network.neutron [-] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.035344] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f294c5e3-062d-4467-a42a-d01a80e39712 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.055734] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5071e539-7207-4ba6-89cc-b9816ae5ecee {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.099593] env[69328]: DEBUG nova.compute.manager [req-46d2cd67-06df-4710-a817-79182ee50ab8 req-a861e658-a5f9-4a51-ad7d-030ccc55c4a6 service nova] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Detach interface failed, port_id=c68f39c4-deae-4739-bc3f-9284775789c4, reason: Instance 0a485411-3206-4674-90e4-58df4a8b755a could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 947.135648] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30ef9e0d-7ac5-4c89-b349-2f521681efee {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.150398] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e640baba-e560-429f-a830-78fb344407c9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.185914] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58a227ee-ca34-47cc-9535-9b3d2a74102f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.196324] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-033bbb23-4895-4f2e-8a1b-53514ca21cb8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.213210] env[69328]: DEBUG nova.compute.provider_tree [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 947.229799] env[69328]: DEBUG nova.compute.manager [req-8bdf7aed-2bf2-4723-b365-fbe23ccfad3c req-05a36712-9ebb-48aa-9320-16750b901c34 service nova] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Received event network-vif-deleted-eebd5d04-278d-4e22-9e5d-df5ae37877cf {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 947.229988] env[69328]: INFO nova.compute.manager [req-8bdf7aed-2bf2-4723-b365-fbe23ccfad3c req-05a36712-9ebb-48aa-9320-16750b901c34 service nova] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Neutron deleted interface eebd5d04-278d-4e22-9e5d-df5ae37877cf; detaching it from the instance and deleting it from the info cache [ 947.230178] env[69328]: DEBUG nova.network.neutron 
[req-8bdf7aed-2bf2-4723-b365-fbe23ccfad3c req-05a36712-9ebb-48aa-9320-16750b901c34 service nova] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.253403] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d0aaba8c-0a38-414a-aec5-a1f5f42324e4 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Lock "82e27131-b401-4885-83fb-825e5c8e2444" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.387s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 947.351340] env[69328]: DEBUG nova.compute.manager [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 947.460969] env[69328]: DEBUG oslo_vmware.api [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273622, 'name': PowerOnVM_Task, 'duration_secs': 0.597331} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.461291] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 947.466824] env[69328]: DEBUG nova.compute.manager [None req-89d7d3a7-990c-4ccd-817b-bc9681fc8409 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 947.467770] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb102dfd-725a-4394-846d-035029dd4b59 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.505933] env[69328]: INFO nova.compute.manager [-] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Took 1.52 seconds to deallocate network for instance. 
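The "Acquiring lock ... / Lock ... acquired ... waited Ns / released ... held Ns" entries (for example the per-instance build lock held for 33.387s above) come from oslo.concurrency's lockutils, which logs how long each caller waited for and held a named lock. A small sketch of both forms of that API; the lock names mirror ones seen in the log, the bodies are placeholders, and Nova reaches lockutils through its own helper rather than calling it directly like this:

    # Sketch of the oslo.concurrency locking that produces the
    # "Acquiring lock ... / acquired ... / released ... held Ns" entries.
    from oslo_concurrency import lockutils

    # Context-manager form, as used for e.g. the "compute_resources" and
    # "refresh_cache-<uuid>" locks in the log.
    with lockutils.lock('compute_resources'):
        pass  # claim or update tracked resources while the lock is held

    # Decorator form: serialize an operation on a single instance by using
    # its UUID as the lock name (UUID taken from the log, body is a stub).
    @lockutils.synchronized('82e27131-b401-4885-83fb-825e5c8e2444')
    def locked_instance_operation():
        pass  # build, terminate, etc. for that one instance

    locked_instance_operation()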
[ 947.513107] env[69328]: DEBUG oslo_concurrency.lockutils [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquiring lock "a95d01cf-c26b-466c-a5b6-a7e43f0321fa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 947.513333] env[69328]: DEBUG oslo_concurrency.lockutils [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "a95d01cf-c26b-466c-a5b6-a7e43f0321fa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 947.624064] env[69328]: DEBUG nova.network.neutron [-] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.650095] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aa421e2c-9c56-4abb-8b44-da66e527c182 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Acquiring lock "82e27131-b401-4885-83fb-825e5c8e2444" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 947.650368] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aa421e2c-9c56-4abb-8b44-da66e527c182 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Lock "82e27131-b401-4885-83fb-825e5c8e2444" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 947.650624] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aa421e2c-9c56-4abb-8b44-da66e527c182 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Acquiring lock "82e27131-b401-4885-83fb-825e5c8e2444-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 947.651099] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aa421e2c-9c56-4abb-8b44-da66e527c182 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Lock "82e27131-b401-4885-83fb-825e5c8e2444-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 947.651259] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aa421e2c-9c56-4abb-8b44-da66e527c182 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Lock "82e27131-b401-4885-83fb-825e5c8e2444-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 947.653418] env[69328]: INFO 
nova.compute.manager [None req-aa421e2c-9c56-4abb-8b44-da66e527c182 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Terminating instance [ 947.718212] env[69328]: DEBUG nova.scheduler.client.report [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 947.734682] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bd8faa32-5655-4860-a304-aa3a66246d72 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.746972] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9167cd60-5263-4fad-a2bf-834a6e47bd29 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.794182] env[69328]: DEBUG nova.compute.manager [req-8bdf7aed-2bf2-4723-b365-fbe23ccfad3c req-05a36712-9ebb-48aa-9320-16750b901c34 service nova] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Detach interface failed, port_id=eebd5d04-278d-4e22-9e5d-df5ae37877cf, reason: Instance fd72bae3-cb72-48d0-a0df-9ea3a770a86c could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 947.877401] env[69328]: DEBUG oslo_concurrency.lockutils [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 948.018914] env[69328]: DEBUG nova.compute.manager [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 948.022188] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8de76a72-155b-4048-ab4a-181ef8187ce3 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 948.127267] env[69328]: INFO nova.compute.manager [-] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Took 1.75 seconds to deallocate network for instance. 
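The scheduler report client entry above compares the compute node's current inventory against what Placement already stores ("Inventory has not changed for provider ..."); when it does change, the report client PUTs the same structure back to Placement. A hedged sketch of that payload and call, reusing the inventory figures from the log but with a placeholder endpoint, token and provider generation:

    # Sketch of the Placement inventory update corresponding to the
    # "Inventory has not changed for provider ..." comparison above.
    # URL, token and generation are placeholders, not deployment values.
    import requests

    provider_uuid = '149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e'  # from the log
    payload = {
        'resource_provider_generation': 1,  # placeholder generation
        'inventories': {
            'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1,
                     'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0},
            'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                          'max_unit': 65530, 'step_size': 1,
                          'allocation_ratio': 1.0},
            'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1,
                        'max_unit': 115, 'step_size': 1,
                        'allocation_ratio': 1.0},
        },
    }
    resp = requests.put(
        'http://placement.example.test/resource_providers/'
        f'{provider_uuid}/inventories',
        json=payload,
        headers={'X-Auth-Token': 'PLACEHOLDER',
                 'OpenStack-API-Version': 'placement 1.26'})
    resp.raise_for_status()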
[ 948.157761] env[69328]: DEBUG nova.compute.manager [None req-aa421e2c-9c56-4abb-8b44-da66e527c182 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 948.158076] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-aa421e2c-9c56-4abb-8b44-da66e527c182 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 948.159011] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99e191fe-5736-41d0-959f-0e516e4b2a49 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.168531] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa421e2c-9c56-4abb-8b44-da66e527c182 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 948.169547] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5e16e062-02eb-45b9-b375-07611cf8b934 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.178277] env[69328]: DEBUG oslo_vmware.api [None req-aa421e2c-9c56-4abb-8b44-da66e527c182 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for the task: (returnval){ [ 948.178277] env[69328]: value = "task-3273623" [ 948.178277] env[69328]: _type = "Task" [ 948.178277] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.188201] env[69328]: DEBUG oslo_vmware.api [None req-aa421e2c-9c56-4abb-8b44-da66e527c182 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273623, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.223242] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.635s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 948.225690] env[69328]: DEBUG nova.compute.manager [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 948.230149] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.734s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 948.232195] env[69328]: INFO nova.compute.claims [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 948.539799] env[69328]: DEBUG oslo_concurrency.lockutils [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 948.634117] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d76b6475-e53b-41ad-a9cf-58f1a146b704 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 948.688746] env[69328]: DEBUG oslo_vmware.api [None req-aa421e2c-9c56-4abb-8b44-da66e527c182 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273623, 'name': PowerOffVM_Task, 'duration_secs': 0.315222} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.689031] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa421e2c-9c56-4abb-8b44-da66e527c182 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 948.689917] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-aa421e2c-9c56-4abb-8b44-da66e527c182 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 948.690259] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-61a4db14-b02f-4fa8-bc85-ab5ea76c4ad9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.737778] env[69328]: DEBUG nova.compute.utils [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 948.742063] env[69328]: DEBUG nova.compute.manager [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 948.742962] env[69328]: DEBUG nova.network.neutron [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 948.762911] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-aa421e2c-9c56-4abb-8b44-da66e527c182 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 948.763201] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-aa421e2c-9c56-4abb-8b44-da66e527c182 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 948.763420] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa421e2c-9c56-4abb-8b44-da66e527c182 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Deleting the datastore file [datastore2] 82e27131-b401-4885-83fb-825e5c8e2444 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 948.763605] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-15d75ec9-9f03-4fd8-a538-a40735c66d4b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.770098] env[69328]: DEBUG oslo_vmware.api [None 
req-aa421e2c-9c56-4abb-8b44-da66e527c182 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for the task: (returnval){ [ 948.770098] env[69328]: value = "task-3273625" [ 948.770098] env[69328]: _type = "Task" [ 948.770098] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.778922] env[69328]: DEBUG oslo_vmware.api [None req-aa421e2c-9c56-4abb-8b44-da66e527c182 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273625, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.980276] env[69328]: INFO nova.compute.manager [None req-0b85c47e-a51c-4c3c-8b33-4882b1ed4cfb tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Unrescuing [ 948.980276] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0b85c47e-a51c-4c3c-8b33-4882b1ed4cfb tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquiring lock "refresh_cache-c751ef77-c3be-46cd-b7eb-fe139bf0998b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.980276] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0b85c47e-a51c-4c3c-8b33-4882b1ed4cfb tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquired lock "refresh_cache-c751ef77-c3be-46cd-b7eb-fe139bf0998b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 948.980276] env[69328]: DEBUG nova.network.neutron [None req-0b85c47e-a51c-4c3c-8b33-4882b1ed4cfb tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 949.046202] env[69328]: DEBUG nova.policy [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a8fbe2a134194d29af48ac8e4986d0cb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd86de4d5055642aa86a29c6768e3db46', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 949.253182] env[69328]: DEBUG nova.compute.manager [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Start building block device mappings for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 949.284963] env[69328]: DEBUG oslo_vmware.api [None req-aa421e2c-9c56-4abb-8b44-da66e527c182 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273625, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.16166} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.289452] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa421e2c-9c56-4abb-8b44-da66e527c182 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 949.289452] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-aa421e2c-9c56-4abb-8b44-da66e527c182 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 949.289452] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-aa421e2c-9c56-4abb-8b44-da66e527c182 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 949.289452] env[69328]: INFO nova.compute.manager [None req-aa421e2c-9c56-4abb-8b44-da66e527c182 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Took 1.13 seconds to destroy the instance on the hypervisor. [ 949.289776] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aa421e2c-9c56-4abb-8b44-da66e527c182 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 949.290222] env[69328]: DEBUG nova.compute.manager [-] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 949.290316] env[69328]: DEBUG nova.network.neutron [-] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 949.659386] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d20a9fe-fad2-441d-8807-10cf921874ac {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.672741] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e590f610-e036-4aaf-9702-898a0e094924 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.677482] env[69328]: DEBUG nova.compute.manager [req-0fe6d123-685d-4873-8a56-0a8b9bdf605f req-5cb6d13d-8af1-4b9a-899f-5ff59d7c9753 service nova] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Received event network-vif-deleted-846c98df-5570-4e8b-8e8b-353bf9825281 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 949.677923] env[69328]: INFO nova.compute.manager [req-0fe6d123-685d-4873-8a56-0a8b9bdf605f req-5cb6d13d-8af1-4b9a-899f-5ff59d7c9753 service nova] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Neutron deleted interface 846c98df-5570-4e8b-8e8b-353bf9825281; detaching it from the instance and deleting it from the info cache [ 949.678158] env[69328]: DEBUG nova.network.neutron [req-0fe6d123-685d-4873-8a56-0a8b9bdf605f req-5cb6d13d-8af1-4b9a-899f-5ff59d7c9753 service nova] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.711737] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f9245c3-09b8-4c41-978a-82a8f8304bd8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.722392] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0725d35-0956-4a72-a0d7-b25705d6ec8f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.738629] env[69328]: DEBUG nova.compute.provider_tree [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 949.894190] env[69328]: DEBUG nova.network.neutron [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Successfully created port: 322eba88-5363-41a8-a44d-50e0a7fdf92e {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 949.933421] env[69328]: DEBUG nova.network.neutron [None req-0b85c47e-a51c-4c3c-8b33-4882b1ed4cfb 
tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Updating instance_info_cache with network_info: [{"id": "2a3862dd-bd04-40ed-9d66-1fa2418297ea", "address": "fa:16:3e:ad:2d:2a", "network": {"id": "bd41ac10-1850-45a2-8e46-6ebdca3d8e13", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-766393176-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efd0e2d2f9ba4416bd8fd08dad912465", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a3862dd-bd", "ovs_interfaceid": "2a3862dd-bd04-40ed-9d66-1fa2418297ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.151042] env[69328]: DEBUG nova.network.neutron [-] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.183119] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-10197436-2d09-460f-b377-ef2bc0d9fc3c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.193753] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54303a8c-2ad1-4bd9-b1eb-f83b41c5ce6a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.242858] env[69328]: DEBUG nova.scheduler.client.report [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 950.247178] env[69328]: DEBUG nova.compute.manager [req-0fe6d123-685d-4873-8a56-0a8b9bdf605f req-5cb6d13d-8af1-4b9a-899f-5ff59d7c9753 service nova] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Detach interface failed, port_id=846c98df-5570-4e8b-8e8b-353bf9825281, reason: Instance 82e27131-b401-4885-83fb-825e5c8e2444 could not be found. 
{{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 950.264049] env[69328]: DEBUG nova.compute.manager [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 950.296357] env[69328]: DEBUG nova.virt.hardware [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 950.296357] env[69328]: DEBUG nova.virt.hardware [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 950.296357] env[69328]: DEBUG nova.virt.hardware [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 950.296523] env[69328]: DEBUG nova.virt.hardware [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 950.296628] env[69328]: DEBUG nova.virt.hardware [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 950.296739] env[69328]: DEBUG nova.virt.hardware [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 950.297286] env[69328]: DEBUG nova.virt.hardware [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 950.297334] env[69328]: DEBUG nova.virt.hardware [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb 
tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 950.297529] env[69328]: DEBUG nova.virt.hardware [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 950.297719] env[69328]: DEBUG nova.virt.hardware [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 950.297870] env[69328]: DEBUG nova.virt.hardware [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 950.298784] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-236af8de-a5b4-40d9-8e9f-ac1ec943c894 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.314024] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cdf188c-ff5d-4ec4-98cd-048610924419 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.385731] env[69328]: DEBUG oslo_concurrency.lockutils [None req-91666333-c907-4039-a376-7c9c991414df tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "f1be93b2-08db-41fe-87c4-f4e5f964cfa4" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 950.386066] env[69328]: DEBUG oslo_concurrency.lockutils [None req-91666333-c907-4039-a376-7c9c991414df tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "f1be93b2-08db-41fe-87c4-f4e5f964cfa4" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 950.386364] env[69328]: DEBUG nova.compute.manager [None req-91666333-c907-4039-a376-7c9c991414df tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 950.387435] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d5adbfb-41f4-4bfe-b8b8-19b01dcd1216 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.397280] env[69328]: DEBUG nova.compute.manager [None req-91666333-c907-4039-a376-7c9c991414df tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Stopping instance; current vm_state: active, current task_state: powering-off, 
current DB power_state: 1, current VM power_state: 1 {{(pid=69328) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 950.397768] env[69328]: DEBUG nova.objects.instance [None req-91666333-c907-4039-a376-7c9c991414df tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lazy-loading 'flavor' on Instance uuid f1be93b2-08db-41fe-87c4-f4e5f964cfa4 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 950.440170] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0b85c47e-a51c-4c3c-8b33-4882b1ed4cfb tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Releasing lock "refresh_cache-c751ef77-c3be-46cd-b7eb-fe139bf0998b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 950.440170] env[69328]: DEBUG nova.objects.instance [None req-0b85c47e-a51c-4c3c-8b33-4882b1ed4cfb tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lazy-loading 'flavor' on Instance uuid c751ef77-c3be-46cd-b7eb-fe139bf0998b {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 950.657288] env[69328]: INFO nova.compute.manager [-] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Took 1.37 seconds to deallocate network for instance. [ 950.748258] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.518s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 950.749130] env[69328]: DEBUG nova.compute.manager [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 950.753726] env[69328]: DEBUG oslo_concurrency.lockutils [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.876s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 950.755729] env[69328]: INFO nova.compute.claims [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 950.947739] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2212f9b7-73b7-45cf-b81e-7c32f974ee53 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.979044] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b85c47e-a51c-4c3c-8b33-4882b1ed4cfb tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 950.979440] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b7ff0b7c-8214-4d13-9cb2-0e7fa416d048 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.981592] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Acquiring lock "18022645-9a2a-489e-b0b1-486165f46f14" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 950.982155] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Lock "18022645-9a2a-489e-b0b1-486165f46f14" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 950.982155] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Acquiring lock "18022645-9a2a-489e-b0b1-486165f46f14-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 950.982329] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Lock "18022645-9a2a-489e-b0b1-486165f46f14-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 950.982481] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Lock "18022645-9a2a-489e-b0b1-486165f46f14-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 950.985515] env[69328]: INFO nova.compute.manager [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Terminating instance [ 950.991844] env[69328]: DEBUG oslo_vmware.api [None req-0b85c47e-a51c-4c3c-8b33-4882b1ed4cfb tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 950.991844] env[69328]: value = "task-3273626" [ 950.991844] env[69328]: _type = "Task" [ 950.991844] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.005857] env[69328]: DEBUG oslo_vmware.api [None req-0b85c47e-a51c-4c3c-8b33-4882b1ed4cfb tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273626, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.163739] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aa421e2c-9c56-4abb-8b44-da66e527c182 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 951.263790] env[69328]: DEBUG nova.compute.utils [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 951.270023] env[69328]: DEBUG nova.compute.manager [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 951.270023] env[69328]: DEBUG nova.network.neutron [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 951.360292] env[69328]: DEBUG nova.policy [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1685bb9a09d84a7a92306c64f0e5895e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '75d5853e3c724d02bacfa75173e38ab3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 951.409860] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-91666333-c907-4039-a376-7c9c991414df tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 951.410240] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-21e15317-b4ba-4744-baee-239f9849142a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.419892] env[69328]: DEBUG oslo_vmware.api [None req-91666333-c907-4039-a376-7c9c991414df tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 951.419892] env[69328]: value = "task-3273627" [ 951.419892] env[69328]: _type = "Task" [ 951.419892] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.429534] env[69328]: DEBUG oslo_vmware.api [None req-91666333-c907-4039-a376-7c9c991414df tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3273627, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.491194] env[69328]: DEBUG nova.compute.manager [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 951.491470] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 951.491903] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5d4f1b74-1e5e-4c41-8b46-f380617ca32e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.506688] env[69328]: DEBUG oslo_vmware.api [None req-0b85c47e-a51c-4c3c-8b33-4882b1ed4cfb tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273626, 'name': PowerOffVM_Task, 'duration_secs': 0.284152} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.508299] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b85c47e-a51c-4c3c-8b33-4882b1ed4cfb tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 951.515086] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b85c47e-a51c-4c3c-8b33-4882b1ed4cfb tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Reconfiguring VM instance instance-00000048 to detach disk 2001 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 951.515368] env[69328]: DEBUG oslo_vmware.api [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Waiting for the task: (returnval){ [ 951.515368] env[69328]: value = "task-3273628" [ 951.515368] env[69328]: _type = "Task" [ 951.515368] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.515769] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cc01b0c6-d572-4eab-92e4-ef0105123b97 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.548370] env[69328]: DEBUG oslo_vmware.api [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Task: {'id': task-3273628, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.550724] env[69328]: DEBUG oslo_vmware.api [None req-0b85c47e-a51c-4c3c-8b33-4882b1ed4cfb tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 951.550724] env[69328]: value = "task-3273629" [ 951.550724] env[69328]: _type = "Task" [ 951.550724] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.564860] env[69328]: DEBUG oslo_vmware.api [None req-0b85c47e-a51c-4c3c-8b33-4882b1ed4cfb tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273629, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.767403] env[69328]: DEBUG nova.compute.manager [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 951.827976] env[69328]: DEBUG nova.network.neutron [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Successfully created port: d97e62a9-59f8-4f3b-9296-f5a0803d2b10 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 951.931232] env[69328]: DEBUG oslo_vmware.api [None req-91666333-c907-4039-a376-7c9c991414df tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3273627, 'name': PowerOffVM_Task, 'duration_secs': 0.187505} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.931614] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-91666333-c907-4039-a376-7c9c991414df tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 951.935019] env[69328]: DEBUG nova.compute.manager [None req-91666333-c907-4039-a376-7c9c991414df tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 951.935019] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f496d18c-817b-4974-9be7-85075a49ab94 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.957779] env[69328]: DEBUG nova.compute.manager [req-468c4646-e8b4-4cf6-a0ab-65d72669f2ef req-2f614c8d-baff-4888-8258-42a3be298bc2 service nova] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Received event network-vif-plugged-322eba88-5363-41a8-a44d-50e0a7fdf92e {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 951.957779] env[69328]: DEBUG oslo_concurrency.lockutils [req-468c4646-e8b4-4cf6-a0ab-65d72669f2ef req-2f614c8d-baff-4888-8258-42a3be298bc2 service nova] Acquiring lock "9ad2b2e3-460a-403e-bfc7-f46648c93849-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 951.958426] env[69328]: DEBUG oslo_concurrency.lockutils [req-468c4646-e8b4-4cf6-a0ab-65d72669f2ef req-2f614c8d-baff-4888-8258-42a3be298bc2 service nova] Lock 
"9ad2b2e3-460a-403e-bfc7-f46648c93849-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 951.958426] env[69328]: DEBUG oslo_concurrency.lockutils [req-468c4646-e8b4-4cf6-a0ab-65d72669f2ef req-2f614c8d-baff-4888-8258-42a3be298bc2 service nova] Lock "9ad2b2e3-460a-403e-bfc7-f46648c93849-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 951.958605] env[69328]: DEBUG nova.compute.manager [req-468c4646-e8b4-4cf6-a0ab-65d72669f2ef req-2f614c8d-baff-4888-8258-42a3be298bc2 service nova] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] No waiting events found dispatching network-vif-plugged-322eba88-5363-41a8-a44d-50e0a7fdf92e {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 951.958812] env[69328]: WARNING nova.compute.manager [req-468c4646-e8b4-4cf6-a0ab-65d72669f2ef req-2f614c8d-baff-4888-8258-42a3be298bc2 service nova] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Received unexpected event network-vif-plugged-322eba88-5363-41a8-a44d-50e0a7fdf92e for instance with vm_state building and task_state spawning. [ 952.054213] env[69328]: DEBUG oslo_vmware.api [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Task: {'id': task-3273628, 'name': PowerOffVM_Task, 'duration_secs': 0.285049} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.061068] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 952.061509] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Volume detach. 
Driver type: vmdk {{(pid=69328) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 952.062487] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653727', 'volume_id': 'aa5050fe-f367-4822-9aa7-4bfac9106402', 'name': 'volume-aa5050fe-f367-4822-9aa7-4bfac9106402', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '18022645-9a2a-489e-b0b1-486165f46f14', 'attached_at': '', 'detached_at': '', 'volume_id': 'aa5050fe-f367-4822-9aa7-4bfac9106402', 'serial': 'aa5050fe-f367-4822-9aa7-4bfac9106402'} {{(pid=69328) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 952.063638] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-880436ae-730d-4c28-903c-2a88ca4c9176 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.071872] env[69328]: DEBUG oslo_vmware.api [None req-0b85c47e-a51c-4c3c-8b33-4882b1ed4cfb tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273629, 'name': ReconfigVM_Task, 'duration_secs': 0.287241} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.087473] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b85c47e-a51c-4c3c-8b33-4882b1ed4cfb tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Reconfigured VM instance instance-00000048 to detach disk 2001 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 952.087781] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b85c47e-a51c-4c3c-8b33-4882b1ed4cfb tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 952.090937] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-635da03f-6184-4221-a67e-5580617fe8f5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.093308] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11c320fb-47c2-44f9-8e87-2b6977955077 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.106624] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1585dc1-05e4-4848-a73a-6ef1ab162ac4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.108784] env[69328]: DEBUG oslo_vmware.api [None req-0b85c47e-a51c-4c3c-8b33-4882b1ed4cfb tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 952.108784] env[69328]: value = 
"task-3273630" [ 952.108784] env[69328]: _type = "Task" [ 952.108784] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.136105] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fa2a768-472e-427d-a573-efa90a2a0d68 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.146020] env[69328]: DEBUG oslo_vmware.api [None req-0b85c47e-a51c-4c3c-8b33-4882b1ed4cfb tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273630, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.146020] env[69328]: DEBUG nova.network.neutron [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Successfully updated port: 322eba88-5363-41a8-a44d-50e0a7fdf92e {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 952.161544] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] The volume has not been displaced from its original location: [datastore1] volume-aa5050fe-f367-4822-9aa7-4bfac9106402/volume-aa5050fe-f367-4822-9aa7-4bfac9106402.vmdk. No consolidation needed. {{(pid=69328) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 952.167213] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Reconfiguring VM instance instance-0000002c to detach disk 2000 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 952.170950] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2a1d69a4-8770-4770-b2ce-735388a17a23 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.192341] env[69328]: DEBUG oslo_vmware.api [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Waiting for the task: (returnval){ [ 952.192341] env[69328]: value = "task-3273631" [ 952.192341] env[69328]: _type = "Task" [ 952.192341] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.205986] env[69328]: DEBUG oslo_vmware.api [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Task: {'id': task-3273631, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.230478] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75c4ead1-dd1c-45c9-90ff-ed7684d543d6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.242099] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1916bdfa-6cf0-4705-bc19-cf77f9b47b4d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.303256] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d8de00a-e7e6-4bd7-baf2-a58e4c071889 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.315377] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa7cb7c7-3c8e-41a7-aaa7-3d5816942191 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.338271] env[69328]: DEBUG nova.compute.provider_tree [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 952.448975] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Acquiring lock "7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 952.449584] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Lock "7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 952.456262] env[69328]: DEBUG oslo_concurrency.lockutils [None req-91666333-c907-4039-a376-7c9c991414df tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "f1be93b2-08db-41fe-87c4-f4e5f964cfa4" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.070s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 952.627073] env[69328]: DEBUG oslo_vmware.api [None req-0b85c47e-a51c-4c3c-8b33-4882b1ed4cfb tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273630, 'name': PowerOnVM_Task, 'duration_secs': 0.460069} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.627073] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b85c47e-a51c-4c3c-8b33-4882b1ed4cfb tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 952.627073] env[69328]: DEBUG nova.compute.manager [None req-0b85c47e-a51c-4c3c-8b33-4882b1ed4cfb tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 952.627073] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8187b707-4c1c-442e-b433-d0c2d8287d7b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.648752] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "refresh_cache-9ad2b2e3-460a-403e-bfc7-f46648c93849" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 952.649356] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquired lock "refresh_cache-9ad2b2e3-460a-403e-bfc7-f46648c93849" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 952.649356] env[69328]: DEBUG nova.network.neutron [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 952.705017] env[69328]: DEBUG oslo_vmware.api [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Task: {'id': task-3273631, 'name': ReconfigVM_Task, 'duration_secs': 0.238638} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.705017] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Reconfigured VM instance instance-0000002c to detach disk 2000 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 952.709932] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a98445af-8a70-40b6-8be2-ec1334db522a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.727105] env[69328]: DEBUG oslo_vmware.api [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Waiting for the task: (returnval){ [ 952.727105] env[69328]: value = "task-3273632" [ 952.727105] env[69328]: _type = "Task" [ 952.727105] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.736272] env[69328]: DEBUG oslo_vmware.api [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Task: {'id': task-3273632, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.809355] env[69328]: DEBUG nova.compute.manager [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 952.838645] env[69328]: DEBUG nova.virt.hardware [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 952.838875] env[69328]: DEBUG nova.virt.hardware [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 952.839032] env[69328]: DEBUG nova.virt.hardware [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 952.839213] env[69328]: DEBUG nova.virt.hardware [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 952.839351] env[69328]: DEBUG nova.virt.hardware [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 952.839598] env[69328]: DEBUG nova.virt.hardware [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 952.839876] env[69328]: DEBUG nova.virt.hardware [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 952.840052] env[69328]: DEBUG nova.virt.hardware [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 952.840218] env[69328]: DEBUG 
nova.virt.hardware [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 952.840374] env[69328]: DEBUG nova.virt.hardware [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 952.840562] env[69328]: DEBUG nova.virt.hardware [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 952.841406] env[69328]: DEBUG nova.scheduler.client.report [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 952.848668] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cab76f8-7e26-42df-a932-5b5048d01045 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.858313] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74cb3f81-cf02-41fb-ae5d-b2915339e7f8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.936511] env[69328]: DEBUG nova.objects.instance [None req-79c9cca0-5b05-40b2-89e9-7ac130644c6f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lazy-loading 'flavor' on Instance uuid f1be93b2-08db-41fe-87c4-f4e5f964cfa4 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 952.955190] env[69328]: DEBUG nova.compute.manager [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Starting instance... 
{{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 953.140857] env[69328]: DEBUG oslo_concurrency.lockutils [None req-773aa48b-a3af-4641-9c01-66d8aed29949 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Acquiring lock "8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 953.141515] env[69328]: DEBUG oslo_concurrency.lockutils [None req-773aa48b-a3af-4641-9c01-66d8aed29949 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Lock "8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 953.141515] env[69328]: DEBUG oslo_concurrency.lockutils [None req-773aa48b-a3af-4641-9c01-66d8aed29949 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Acquiring lock "8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 953.141697] env[69328]: DEBUG oslo_concurrency.lockutils [None req-773aa48b-a3af-4641-9c01-66d8aed29949 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Lock "8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 953.141779] env[69328]: DEBUG oslo_concurrency.lockutils [None req-773aa48b-a3af-4641-9c01-66d8aed29949 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Lock "8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 953.149017] env[69328]: INFO nova.compute.manager [None req-773aa48b-a3af-4641-9c01-66d8aed29949 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Terminating instance [ 953.191022] env[69328]: DEBUG nova.network.neutron [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 953.246172] env[69328]: DEBUG oslo_vmware.api [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Task: {'id': task-3273632, 'name': ReconfigVM_Task, 'duration_secs': 0.300715} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.246568] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653727', 'volume_id': 'aa5050fe-f367-4822-9aa7-4bfac9106402', 'name': 'volume-aa5050fe-f367-4822-9aa7-4bfac9106402', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '18022645-9a2a-489e-b0b1-486165f46f14', 'attached_at': '', 'detached_at': '', 'volume_id': 'aa5050fe-f367-4822-9aa7-4bfac9106402', 'serial': 'aa5050fe-f367-4822-9aa7-4bfac9106402'} {{(pid=69328) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 953.246907] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 953.247820] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20a0f5f9-b285-42e2-9303-55a6bc2528d1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.257190] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 953.257525] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-13bc63a2-d1e6-451f-a043-1c8e1cec9a72 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.331417] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 953.334047] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 953.334047] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Deleting the datastore file [datastore1] 18022645-9a2a-489e-b0b1-486165f46f14 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 953.334047] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-822fa9a8-7ce2-4e72-9531-599d997ca4bc {{(pid=69328) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.341674] env[69328]: DEBUG oslo_vmware.api [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Waiting for the task: (returnval){ [ 953.341674] env[69328]: value = "task-3273634" [ 953.341674] env[69328]: _type = "Task" [ 953.341674] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.353021] env[69328]: DEBUG oslo_concurrency.lockutils [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.599s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 953.354033] env[69328]: DEBUG nova.compute.manager [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 953.357177] env[69328]: DEBUG oslo_vmware.api [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Task: {'id': task-3273634, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.357888] env[69328]: DEBUG nova.network.neutron [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Updating instance_info_cache with network_info: [{"id": "322eba88-5363-41a8-a44d-50e0a7fdf92e", "address": "fa:16:3e:05:5d:fc", "network": {"id": "7f5dcab4-3cec-42a1-b589-88cb373af645", "bridge": "br-int", "label": "tempest-ServersTestJSON-1833802368-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d86de4d5055642aa86a29c6768e3db46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b94712a6-b777-47dd-bc06-f9acfce2d936", "external-id": "nsx-vlan-transportzone-494", "segmentation_id": 494, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap322eba88-53", "ovs_interfaceid": "322eba88-5363-41a8-a44d-50e0a7fdf92e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 953.360068] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8de76a72-155b-4048-ab4a-181ef8187ce3 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.338s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 953.360377] env[69328]: DEBUG nova.objects.instance [None req-8de76a72-155b-4048-ab4a-181ef8187ce3 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lazy-loading 'resources' on Instance uuid 0a485411-3206-4674-90e4-58df4a8b755a {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 953.433086] env[69328]: DEBUG nova.compute.manager [req-f99fd331-935b-452e-9f68-7988ec72c7d2 req-fceb7e6c-bbe8-48c8-8806-c6f45fda9e7b service nova] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Received event network-vif-plugged-d97e62a9-59f8-4f3b-9296-f5a0803d2b10 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 953.433320] env[69328]: DEBUG oslo_concurrency.lockutils [req-f99fd331-935b-452e-9f68-7988ec72c7d2 req-fceb7e6c-bbe8-48c8-8806-c6f45fda9e7b service nova] Acquiring lock "96f604a9-e42c-4aa8-b5b5-edcb34901d94-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 953.433549] env[69328]: DEBUG oslo_concurrency.lockutils [req-f99fd331-935b-452e-9f68-7988ec72c7d2 req-fceb7e6c-bbe8-48c8-8806-c6f45fda9e7b service nova] Lock "96f604a9-e42c-4aa8-b5b5-edcb34901d94-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 953.433728] env[69328]: DEBUG oslo_concurrency.lockutils [req-f99fd331-935b-452e-9f68-7988ec72c7d2 req-fceb7e6c-bbe8-48c8-8806-c6f45fda9e7b service nova] Lock "96f604a9-e42c-4aa8-b5b5-edcb34901d94-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 953.433895] env[69328]: DEBUG nova.compute.manager [req-f99fd331-935b-452e-9f68-7988ec72c7d2 req-fceb7e6c-bbe8-48c8-8806-c6f45fda9e7b service nova] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] No waiting events found dispatching network-vif-plugged-d97e62a9-59f8-4f3b-9296-f5a0803d2b10 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 953.434859] env[69328]: WARNING nova.compute.manager [req-f99fd331-935b-452e-9f68-7988ec72c7d2 req-fceb7e6c-bbe8-48c8-8806-c6f45fda9e7b service nova] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Received unexpected event network-vif-plugged-d97e62a9-59f8-4f3b-9296-f5a0803d2b10 for instance with vm_state building and task_state spawning. 
[ 953.450850] env[69328]: DEBUG oslo_concurrency.lockutils [None req-79c9cca0-5b05-40b2-89e9-7ac130644c6f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "refresh_cache-f1be93b2-08db-41fe-87c4-f4e5f964cfa4" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 953.451059] env[69328]: DEBUG oslo_concurrency.lockutils [None req-79c9cca0-5b05-40b2-89e9-7ac130644c6f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquired lock "refresh_cache-f1be93b2-08db-41fe-87c4-f4e5f964cfa4" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 953.451242] env[69328]: DEBUG nova.network.neutron [None req-79c9cca0-5b05-40b2-89e9-7ac130644c6f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 953.451451] env[69328]: DEBUG nova.objects.instance [None req-79c9cca0-5b05-40b2-89e9-7ac130644c6f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lazy-loading 'info_cache' on Instance uuid f1be93b2-08db-41fe-87c4-f4e5f964cfa4 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 953.486346] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 953.657606] env[69328]: DEBUG nova.network.neutron [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Successfully updated port: d97e62a9-59f8-4f3b-9296-f5a0803d2b10 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 953.659551] env[69328]: DEBUG nova.compute.manager [None req-773aa48b-a3af-4641-9c01-66d8aed29949 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 953.659551] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-773aa48b-a3af-4641-9c01-66d8aed29949 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 953.666637] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e23f72a8-592c-4294-a03e-4c6afa645e43 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.676562] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-773aa48b-a3af-4641-9c01-66d8aed29949 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 953.676854] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-11028c2a-1cc4-484b-9bbe-127a8bd9ba8c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.689972] env[69328]: DEBUG oslo_vmware.api [None req-773aa48b-a3af-4641-9c01-66d8aed29949 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Waiting for the task: (returnval){ [ 953.689972] env[69328]: value = "task-3273635" [ 953.689972] env[69328]: _type = "Task" [ 953.689972] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.704636] env[69328]: DEBUG oslo_vmware.api [None req-773aa48b-a3af-4641-9c01-66d8aed29949 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Task: {'id': task-3273635, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.857720] env[69328]: DEBUG oslo_vmware.api [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Task: {'id': task-3273634, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144742} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.857720] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 953.857720] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 953.857720] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 953.857720] env[69328]: INFO nova.compute.manager [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Took 2.36 seconds to destroy the instance on the hypervisor. [ 953.857720] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 953.857720] env[69328]: DEBUG nova.compute.manager [-] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 953.857720] env[69328]: DEBUG nova.network.neutron [-] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 953.864768] env[69328]: DEBUG nova.compute.utils [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 953.865268] env[69328]: DEBUG nova.compute.manager [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 953.865485] env[69328]: DEBUG nova.network.neutron [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 953.876089] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Releasing lock "refresh_cache-9ad2b2e3-460a-403e-bfc7-f46648c93849" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 953.876089] env[69328]: DEBUG nova.compute.manager [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Instance network_info: |[{"id": "322eba88-5363-41a8-a44d-50e0a7fdf92e", "address": "fa:16:3e:05:5d:fc", "network": {"id": "7f5dcab4-3cec-42a1-b589-88cb373af645", "bridge": "br-int", "label": "tempest-ServersTestJSON-1833802368-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d86de4d5055642aa86a29c6768e3db46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b94712a6-b777-47dd-bc06-f9acfce2d936", "external-id": "nsx-vlan-transportzone-494", "segmentation_id": 494, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap322eba88-53", "ovs_interfaceid": "322eba88-5363-41a8-a44d-50e0a7fdf92e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 953.878984] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:05:5d:fc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b94712a6-b777-47dd-bc06-f9acfce2d936', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '322eba88-5363-41a8-a44d-50e0a7fdf92e', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 953.891910] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 953.893618] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 953.893862] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-356cfd65-7299-42bc-9385-bc9c7e3b23da {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.919849] env[69328]: DEBUG oslo_vmware.rw_handles [None req-f94e39fa-c995-4316-94a4-8df2c241aba1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525a9ae2-6a0f-391a-8062-e9d8ee94119f/disk-0.vmdk. {{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 953.921152] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aad4e08-f8ad-4011-a614-d99f25e69c2f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.930209] env[69328]: DEBUG oslo_vmware.rw_handles [None req-f94e39fa-c995-4316-94a4-8df2c241aba1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525a9ae2-6a0f-391a-8062-e9d8ee94119f/disk-0.vmdk is in state: ready. {{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 953.930348] env[69328]: ERROR oslo_vmware.rw_handles [None req-f94e39fa-c995-4316-94a4-8df2c241aba1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525a9ae2-6a0f-391a-8062-e9d8ee94119f/disk-0.vmdk due to incomplete transfer. [ 953.931797] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-6d4e1da9-2d6e-4ec3-b930-7ea3753e98d9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.933732] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 953.933732] env[69328]: value = "task-3273636" [ 953.933732] env[69328]: _type = "Task" [ 953.933732] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.943162] env[69328]: DEBUG oslo_vmware.rw_handles [None req-f94e39fa-c995-4316-94a4-8df2c241aba1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525a9ae2-6a0f-391a-8062-e9d8ee94119f/disk-0.vmdk. 
{{(pid=69328) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 953.943402] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-f94e39fa-c995-4316-94a4-8df2c241aba1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Uploaded image 44b9c7fb-3d5c-4708-a4ae-a8a2aba3a7ca to the Glance image server {{(pid=69328) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 953.946674] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f94e39fa-c995-4316-94a4-8df2c241aba1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Destroying the VM {{(pid=69328) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 953.946674] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273636, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.947676] env[69328]: DEBUG nova.policy [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0ca24c1b09374feeaec13dfeeaf02d94', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6bad0df17bba4bc996fe5cf1faf23fad', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 953.951550] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-dcee21d9-fa2a-470c-b0b6-a2cb64fc5d13 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.958926] env[69328]: DEBUG nova.objects.base [None req-79c9cca0-5b05-40b2-89e9-7ac130644c6f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=69328) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 953.962257] env[69328]: DEBUG oslo_vmware.api [None req-f94e39fa-c995-4316-94a4-8df2c241aba1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 953.962257] env[69328]: value = "task-3273637" [ 953.962257] env[69328]: _type = "Task" [ 953.962257] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.978450] env[69328]: DEBUG oslo_vmware.api [None req-f94e39fa-c995-4316-94a4-8df2c241aba1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273637, 'name': Destroy_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.172170] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "refresh_cache-96f604a9-e42c-4aa8-b5b5-edcb34901d94" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.172170] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquired lock "refresh_cache-96f604a9-e42c-4aa8-b5b5-edcb34901d94" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 954.172170] env[69328]: DEBUG nova.network.neutron [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 954.188420] env[69328]: DEBUG oslo_concurrency.lockutils [None req-73b796c4-34b6-4b8d-8503-ae2b131f9fce tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "interface-36f6aab5-2774-402b-9db6-9912f2d5d473-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 954.188773] env[69328]: DEBUG oslo_concurrency.lockutils [None req-73b796c4-34b6-4b8d-8503-ae2b131f9fce tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "interface-36f6aab5-2774-402b-9db6-9912f2d5d473-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 954.189146] env[69328]: DEBUG nova.objects.instance [None req-73b796c4-34b6-4b8d-8503-ae2b131f9fce tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lazy-loading 'flavor' on Instance uuid 36f6aab5-2774-402b-9db6-9912f2d5d473 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 954.204324] env[69328]: DEBUG oslo_vmware.api [None req-773aa48b-a3af-4641-9c01-66d8aed29949 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Task: {'id': task-3273635, 'name': PowerOffVM_Task, 'duration_secs': 0.221681} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.204582] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-773aa48b-a3af-4641-9c01-66d8aed29949 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 954.204582] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-773aa48b-a3af-4641-9c01-66d8aed29949 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 954.204941] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9d4f70e6-69d5-44ed-9779-804a8b68a78d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.278531] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-773aa48b-a3af-4641-9c01-66d8aed29949 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 954.278861] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-773aa48b-a3af-4641-9c01-66d8aed29949 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 954.279142] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-773aa48b-a3af-4641-9c01-66d8aed29949 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Deleting the datastore file [datastore1] 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 954.279450] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6c8fd61b-f4f8-4a94-8c79-87246c245809 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.294131] env[69328]: DEBUG oslo_vmware.api [None req-773aa48b-a3af-4641-9c01-66d8aed29949 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Waiting for the task: (returnval){ [ 954.294131] env[69328]: value = "task-3273639" [ 954.294131] env[69328]: _type = "Task" [ 954.294131] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.303653] env[69328]: DEBUG oslo_vmware.api [None req-773aa48b-a3af-4641-9c01-66d8aed29949 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Task: {'id': task-3273639, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.368243] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb745cc3-a2ba-43b7-b259-793af559fe19 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.379122] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4718eb69-c433-42a8-8044-234e49884c61 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.383449] env[69328]: DEBUG nova.compute.manager [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 954.423074] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5369699d-ba2d-46df-aa39-9e4d507fcbf5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.433750] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e787425-6c7b-4d94-a918-f51eb724488c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.464720] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273636, 'name': CreateVM_Task, 'duration_secs': 0.388882} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.468679] env[69328]: DEBUG nova.compute.provider_tree [None req-8de76a72-155b-4048-ab4a-181ef8187ce3 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 954.470095] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 954.474555] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.474716] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 954.475055] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 954.476062] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0c7a813-1ae3-4d01-9262-22be274634cd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.482463] env[69328]: DEBUG oslo_vmware.api [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 954.482463] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e69a0b-672f-8b47-db59-2ff75eb533c3" [ 954.482463] env[69328]: _type = "Task" [ 954.482463] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.487486] env[69328]: DEBUG oslo_vmware.api [None req-f94e39fa-c995-4316-94a4-8df2c241aba1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273637, 'name': Destroy_Task, 'duration_secs': 0.363016} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.493046] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-f94e39fa-c995-4316-94a4-8df2c241aba1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Destroyed the VM [ 954.493313] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f94e39fa-c995-4316-94a4-8df2c241aba1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Deleting Snapshot of the VM instance {{(pid=69328) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 954.493835] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-780cff94-7894-4755-83bc-ed10e8c0139d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.503149] env[69328]: DEBUG oslo_vmware.api [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e69a0b-672f-8b47-db59-2ff75eb533c3, 'name': SearchDatastore_Task, 'duration_secs': 0.011764} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.504639] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 954.505107] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 954.505208] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.505415] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 954.505454] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 954.505764] env[69328]: DEBUG oslo_vmware.api [None req-f94e39fa-c995-4316-94a4-8df2c241aba1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 954.505764] env[69328]: value = "task-3273640" [ 954.505764] env[69328]: _type = "Task" [ 954.505764] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.505941] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e0b9a87f-70c8-4d87-84f4-eeee2f4e0251 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.520930] env[69328]: DEBUG oslo_vmware.api [None req-f94e39fa-c995-4316-94a4-8df2c241aba1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273640, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.522332] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 954.522638] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 954.523509] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e66b4df2-853f-42f1-9ddb-a761d9c9c03a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.532591] env[69328]: DEBUG oslo_vmware.api [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 954.532591] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52417fa5-a537-7dab-9ef9-4d985e4e5baf" [ 954.532591] env[69328]: _type = "Task" [ 954.532591] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.540990] env[69328]: DEBUG oslo_vmware.api [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52417fa5-a537-7dab-9ef9-4d985e4e5baf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.579939] env[69328]: DEBUG nova.compute.manager [req-ac4c9e40-076c-4837-9fd7-4dbe581315b7 req-899f47c7-b98f-4777-8e69-443799379763 service nova] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Received event network-changed-322eba88-5363-41a8-a44d-50e0a7fdf92e {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 954.579939] env[69328]: DEBUG nova.compute.manager [req-ac4c9e40-076c-4837-9fd7-4dbe581315b7 req-899f47c7-b98f-4777-8e69-443799379763 service nova] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Refreshing instance network info cache due to event network-changed-322eba88-5363-41a8-a44d-50e0a7fdf92e. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 954.580564] env[69328]: DEBUG oslo_concurrency.lockutils [req-ac4c9e40-076c-4837-9fd7-4dbe581315b7 req-899f47c7-b98f-4777-8e69-443799379763 service nova] Acquiring lock "refresh_cache-9ad2b2e3-460a-403e-bfc7-f46648c93849" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.580763] env[69328]: DEBUG oslo_concurrency.lockutils [req-ac4c9e40-076c-4837-9fd7-4dbe581315b7 req-899f47c7-b98f-4777-8e69-443799379763 service nova] Acquired lock "refresh_cache-9ad2b2e3-460a-403e-bfc7-f46648c93849" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 954.581237] env[69328]: DEBUG nova.network.neutron [req-ac4c9e40-076c-4837-9fd7-4dbe581315b7 req-899f47c7-b98f-4777-8e69-443799379763 service nova] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Refreshing network info cache for port 322eba88-5363-41a8-a44d-50e0a7fdf92e {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 954.649220] env[69328]: DEBUG nova.network.neutron [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Successfully created port: dce2dda5-86c0-4f69-a3c1-c0f7a5c3b56e {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 954.718752] env[69328]: DEBUG nova.network.neutron [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 954.802762] env[69328]: DEBUG oslo_vmware.api [None req-773aa48b-a3af-4641-9c01-66d8aed29949 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Task: {'id': task-3273639, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158828} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.803931] env[69328]: DEBUG nova.network.neutron [None req-79c9cca0-5b05-40b2-89e9-7ac130644c6f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Updating instance_info_cache with network_info: [{"id": "1018560a-13d7-4d01-8fc4-03d0b9beab90", "address": "fa:16:3e:33:ba:27", "network": {"id": "4031def8-0553-461f-9528-aa338b9d7b2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1834835647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f357b5a9494b4849a83aa934c5d4e26b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "357d2811-e990-4985-9f9e-b158d10d3699", "external-id": "nsx-vlan-transportzone-641", "segmentation_id": 641, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1018560a-13", "ovs_interfaceid": "1018560a-13d7-4d01-8fc4-03d0b9beab90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.805867] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-773aa48b-a3af-4641-9c01-66d8aed29949 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 954.806227] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-773aa48b-a3af-4641-9c01-66d8aed29949 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 954.806435] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-773aa48b-a3af-4641-9c01-66d8aed29949 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 954.806673] env[69328]: INFO nova.compute.manager [None req-773aa48b-a3af-4641-9c01-66d8aed29949 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Took 1.15 seconds to destroy the instance on the hypervisor. [ 954.807479] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-773aa48b-a3af-4641-9c01-66d8aed29949 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 954.807479] env[69328]: DEBUG nova.compute.manager [-] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 954.807479] env[69328]: DEBUG nova.network.neutron [-] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 954.834914] env[69328]: DEBUG nova.objects.instance [None req-73b796c4-34b6-4b8d-8503-ae2b131f9fce tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lazy-loading 'pci_requests' on Instance uuid 36f6aab5-2774-402b-9db6-9912f2d5d473 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 954.963585] env[69328]: DEBUG nova.network.neutron [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Updating instance_info_cache with network_info: [{"id": "d97e62a9-59f8-4f3b-9296-f5a0803d2b10", "address": "fa:16:3e:81:01:fa", "network": {"id": "e2ab6957-2c9d-4b95-91bd-5a62d9a284ba", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-967470666-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75d5853e3c724d02bacfa75173e38ab3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd97e62a9-59", "ovs_interfaceid": "d97e62a9-59f8-4f3b-9296-f5a0803d2b10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.975272] env[69328]: DEBUG nova.network.neutron [-] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.978639] env[69328]: DEBUG nova.scheduler.client.report [None req-8de76a72-155b-4048-ab4a-181ef8187ce3 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 955.022885] env[69328]: DEBUG oslo_vmware.api 
[None req-f94e39fa-c995-4316-94a4-8df2c241aba1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273640, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.045064] env[69328]: DEBUG oslo_vmware.api [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52417fa5-a537-7dab-9ef9-4d985e4e5baf, 'name': SearchDatastore_Task, 'duration_secs': 0.01048} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.045964] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b80adf02-29e8-4060-adf7-2feca5b40ac4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.053216] env[69328]: DEBUG oslo_vmware.api [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 955.053216] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52cffad6-c6cd-ef00-b955-3737a6023207" [ 955.053216] env[69328]: _type = "Task" [ 955.053216] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.062382] env[69328]: DEBUG oslo_vmware.api [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52cffad6-c6cd-ef00-b955-3737a6023207, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.308158] env[69328]: DEBUG oslo_concurrency.lockutils [None req-79c9cca0-5b05-40b2-89e9-7ac130644c6f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Releasing lock "refresh_cache-f1be93b2-08db-41fe-87c4-f4e5f964cfa4" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 955.340726] env[69328]: DEBUG nova.objects.base [None req-73b796c4-34b6-4b8d-8503-ae2b131f9fce tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Object Instance<36f6aab5-2774-402b-9db6-9912f2d5d473> lazy-loaded attributes: flavor,pci_requests {{(pid=69328) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 955.340726] env[69328]: DEBUG nova.network.neutron [None req-73b796c4-34b6-4b8d-8503-ae2b131f9fce tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 955.379130] env[69328]: DEBUG nova.policy [None req-73b796c4-34b6-4b8d-8503-ae2b131f9fce tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '69ca01fd1d0f42b0b05a5426da9753ad', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '30209bc93a4042488f15c73b7e4733d5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 955.395961] env[69328]: DEBUG nova.compute.manager [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 955.421494] env[69328]: DEBUG nova.virt.hardware [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 955.421811] env[69328]: DEBUG nova.virt.hardware [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 955.421976] env[69328]: DEBUG nova.virt.hardware [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 955.422178] env[69328]: DEBUG nova.virt.hardware [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 955.422325] env[69328]: DEBUG nova.virt.hardware [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 955.422507] env[69328]: DEBUG nova.virt.hardware [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 955.422714] env[69328]: DEBUG nova.virt.hardware [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 955.423021] env[69328]: DEBUG nova.virt.hardware [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 955.424052] env[69328]: DEBUG nova.virt.hardware [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 
tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 955.424052] env[69328]: DEBUG nova.virt.hardware [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 955.424052] env[69328]: DEBUG nova.virt.hardware [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 955.424417] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f921dbd0-8c72-45a6-a673-9692290af07f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.436798] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe342b4f-d8ac-45cf-81e6-f3cfa43c1f4b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.466946] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Releasing lock "refresh_cache-96f604a9-e42c-4aa8-b5b5-edcb34901d94" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 955.467315] env[69328]: DEBUG nova.compute.manager [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Instance network_info: |[{"id": "d97e62a9-59f8-4f3b-9296-f5a0803d2b10", "address": "fa:16:3e:81:01:fa", "network": {"id": "e2ab6957-2c9d-4b95-91bd-5a62d9a284ba", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-967470666-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75d5853e3c724d02bacfa75173e38ab3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd97e62a9-59", "ovs_interfaceid": "d97e62a9-59f8-4f3b-9296-f5a0803d2b10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 955.471218] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 
tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:81:01:fa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'abcf0d10-3f3f-45dc-923e-1c78766e2dad', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd97e62a9-59f8-4f3b-9296-f5a0803d2b10', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 955.476901] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 955.477153] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 955.477373] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eb0eed3b-89f1-40b5-9021-8272135d4248 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.497379] env[69328]: INFO nova.compute.manager [-] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Took 1.64 seconds to deallocate network for instance. [ 955.497379] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8de76a72-155b-4048-ab4a-181ef8187ce3 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.137s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 955.503818] env[69328]: DEBUG oslo_concurrency.lockutils [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.963s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 955.505058] env[69328]: INFO nova.compute.claims [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 955.516048] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 955.516048] env[69328]: value = "task-3273641" [ 955.516048] env[69328]: _type = "Task" [ 955.516048] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.520106] env[69328]: DEBUG oslo_vmware.api [None req-f94e39fa-c995-4316-94a4-8df2c241aba1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273640, 'name': RemoveSnapshot_Task, 'duration_secs': 0.650232} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.524089] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f94e39fa-c995-4316-94a4-8df2c241aba1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Deleted Snapshot of the VM instance {{(pid=69328) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 955.525164] env[69328]: INFO nova.compute.manager [None req-f94e39fa-c995-4316-94a4-8df2c241aba1 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Took 14.47 seconds to snapshot the instance on the hypervisor. [ 955.534535] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273641, 'name': CreateVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.547132] env[69328]: INFO nova.scheduler.client.report [None req-8de76a72-155b-4048-ab4a-181ef8187ce3 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Deleted allocations for instance 0a485411-3206-4674-90e4-58df4a8b755a [ 955.569391] env[69328]: DEBUG oslo_vmware.api [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52cffad6-c6cd-ef00-b955-3737a6023207, 'name': SearchDatastore_Task, 'duration_secs': 0.024727} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.574022] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 955.574022] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 9ad2b2e3-460a-403e-bfc7-f46648c93849/9ad2b2e3-460a-403e-bfc7-f46648c93849.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 955.574022] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2e7096e5-c2ca-4103-b1c5-a908f6cf39f2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.582687] env[69328]: DEBUG oslo_vmware.api [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 955.582687] env[69328]: value = "task-3273642" [ 955.582687] env[69328]: _type = "Task" [ 955.582687] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.593201] env[69328]: DEBUG oslo_vmware.api [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273642, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.778983] env[69328]: DEBUG nova.network.neutron [None req-73b796c4-34b6-4b8d-8503-ae2b131f9fce tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Successfully created port: 3cd046f6-ec54-42ea-acae-2410cbcf3a47 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 955.784455] env[69328]: DEBUG nova.network.neutron [req-ac4c9e40-076c-4837-9fd7-4dbe581315b7 req-899f47c7-b98f-4777-8e69-443799379763 service nova] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Updated VIF entry in instance network info cache for port 322eba88-5363-41a8-a44d-50e0a7fdf92e. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 955.784686] env[69328]: DEBUG nova.network.neutron [req-ac4c9e40-076c-4837-9fd7-4dbe581315b7 req-899f47c7-b98f-4777-8e69-443799379763 service nova] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Updating instance_info_cache with network_info: [{"id": "322eba88-5363-41a8-a44d-50e0a7fdf92e", "address": "fa:16:3e:05:5d:fc", "network": {"id": "7f5dcab4-3cec-42a1-b589-88cb373af645", "bridge": "br-int", "label": "tempest-ServersTestJSON-1833802368-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d86de4d5055642aa86a29c6768e3db46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b94712a6-b777-47dd-bc06-f9acfce2d936", "external-id": "nsx-vlan-transportzone-494", "segmentation_id": 494, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap322eba88-53", "ovs_interfaceid": "322eba88-5363-41a8-a44d-50e0a7fdf92e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 956.009389] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Acquiring lock "772ab9b3-23ac-46c6-acb1-af0b2726fd90" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 956.009621] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Lock "772ab9b3-23ac-46c6-acb1-af0b2726fd90" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 956.037100] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273641, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.062858] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8de76a72-155b-4048-ab4a-181ef8187ce3 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "0a485411-3206-4674-90e4-58df4a8b755a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.245s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 956.070721] env[69328]: INFO nova.compute.manager [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Took 0.57 seconds to detach 1 volumes for instance. [ 956.072211] env[69328]: DEBUG nova.compute.manager [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Deleting volume: aa5050fe-f367-4822-9aa7-4bfac9106402 {{(pid=69328) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 956.094807] env[69328]: DEBUG oslo_vmware.api [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273642, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.496162} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.095160] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 9ad2b2e3-460a-403e-bfc7-f46648c93849/9ad2b2e3-460a-403e-bfc7-f46648c93849.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 956.095694] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 956.096356] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6fc8ed5f-fffd-4920-92a8-62f7629e72ee {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.105201] env[69328]: DEBUG oslo_vmware.api [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 956.105201] env[69328]: value = "task-3273643" [ 956.105201] env[69328]: _type = "Task" [ 956.105201] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.116150] env[69328]: DEBUG oslo_vmware.api [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273643, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.157065] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9f77844c-f748-4d64-ad38-fcb4c51d97ec tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "3ba646e8-a5c8-4917-a1c4-32b37affb598" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 956.157400] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9f77844c-f748-4d64-ad38-fcb4c51d97ec tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "3ba646e8-a5c8-4917-a1c4-32b37affb598" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 956.290753] env[69328]: DEBUG oslo_concurrency.lockutils [req-ac4c9e40-076c-4837-9fd7-4dbe581315b7 req-899f47c7-b98f-4777-8e69-443799379763 service nova] Releasing lock "refresh_cache-9ad2b2e3-460a-403e-bfc7-f46648c93849" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 956.317679] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-79c9cca0-5b05-40b2-89e9-7ac130644c6f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 956.318550] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-29c60816-65ec-4871-9375-efabdae206ca {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.328097] env[69328]: DEBUG oslo_vmware.api [None req-79c9cca0-5b05-40b2-89e9-7ac130644c6f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 956.328097] env[69328]: value = "task-3273645" [ 956.328097] env[69328]: _type = "Task" [ 956.328097] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.340668] env[69328]: DEBUG nova.network.neutron [-] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 956.340668] env[69328]: DEBUG oslo_vmware.api [None req-79c9cca0-5b05-40b2-89e9-7ac130644c6f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3273645, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.434879] env[69328]: DEBUG nova.network.neutron [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Successfully updated port: dce2dda5-86c0-4f69-a3c1-c0f7a5c3b56e {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 956.513965] env[69328]: DEBUG nova.compute.manager [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 956.532877] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273641, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.620024] env[69328]: DEBUG oslo_vmware.api [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273643, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.171832} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.620820] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 956.621727] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d286f10-89f4-4f2f-8590-554f97727aac {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.640455] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 956.650711] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Reconfiguring VM instance instance-00000050 to attach disk [datastore2] 9ad2b2e3-460a-403e-bfc7-f46648c93849/9ad2b2e3-460a-403e-bfc7-f46648c93849.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 956.653963] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bb8a07ce-80a0-4574-995b-c427aaa95e7f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.670686] env[69328]: DEBUG nova.compute.utils [None req-9f77844c-f748-4d64-ad38-fcb4c51d97ec tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name 
/opt/stack/nova/nova/compute/utils.py:239}} [ 956.683359] env[69328]: DEBUG oslo_vmware.api [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 956.683359] env[69328]: value = "task-3273646" [ 956.683359] env[69328]: _type = "Task" [ 956.683359] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.693462] env[69328]: DEBUG oslo_vmware.api [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273646, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.838677] env[69328]: DEBUG oslo_vmware.api [None req-79c9cca0-5b05-40b2-89e9-7ac130644c6f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3273645, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.841817] env[69328]: INFO nova.compute.manager [-] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Took 2.03 seconds to deallocate network for instance. [ 956.888903] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fc501af-a601-45c9-a158-a65a8f6005a9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.897974] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7f42148-bfb6-4423-aa25-71e47f3434cc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.930259] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f05a5ad-f67d-413b-b08b-525360334b9f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.937902] env[69328]: DEBUG oslo_concurrency.lockutils [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquiring lock "refresh_cache-65e38a02-880b-46e2-8866-645a9fc17c7a" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.938108] env[69328]: DEBUG oslo_concurrency.lockutils [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquired lock "refresh_cache-65e38a02-880b-46e2-8866-645a9fc17c7a" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 956.938234] env[69328]: DEBUG nova.network.neutron [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 956.941235] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c98f74b-d56b-4914-99de-6eac7279452e {{(pid=69328) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.955287] env[69328]: DEBUG nova.compute.provider_tree [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 957.035555] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273641, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.041128] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 957.174094] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9f77844c-f748-4d64-ad38-fcb4c51d97ec tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "3ba646e8-a5c8-4917-a1c4-32b37affb598" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.016s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 957.194312] env[69328]: DEBUG oslo_vmware.api [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273646, 'name': ReconfigVM_Task, 'duration_secs': 0.3169} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.194579] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Reconfigured VM instance instance-00000050 to attach disk [datastore2] 9ad2b2e3-460a-403e-bfc7-f46648c93849/9ad2b2e3-460a-403e-bfc7-f46648c93849.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 957.195227] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b8324444-224b-48c0-bd98-638a44376af5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.203940] env[69328]: DEBUG oslo_vmware.api [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 957.203940] env[69328]: value = "task-3273647" [ 957.203940] env[69328]: _type = "Task" [ 957.203940] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.216401] env[69328]: DEBUG oslo_vmware.api [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273647, 'name': Rename_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.338115] env[69328]: DEBUG oslo_vmware.api [None req-79c9cca0-5b05-40b2-89e9-7ac130644c6f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3273645, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.347241] env[69328]: DEBUG oslo_concurrency.lockutils [None req-773aa48b-a3af-4641-9c01-66d8aed29949 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 957.457944] env[69328]: DEBUG nova.scheduler.client.report [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 957.489343] env[69328]: DEBUG nova.network.neutron [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 957.533885] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273641, 'name': CreateVM_Task, 'duration_secs': 1.607704} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.533885] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 957.533885] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.533885] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 957.533885] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 957.534179] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-808ad662-ef2c-420e-87c5-99ae7014934e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.539196] env[69328]: DEBUG oslo_vmware.api [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 957.539196] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e606b3-0343-1346-afc9-875dd5c481cd" [ 957.539196] env[69328]: _type = "Task" [ 957.539196] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.547920] env[69328]: DEBUG oslo_vmware.api [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e606b3-0343-1346-afc9-875dd5c481cd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.644889] env[69328]: DEBUG nova.network.neutron [None req-73b796c4-34b6-4b8d-8503-ae2b131f9fce tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Successfully updated port: 3cd046f6-ec54-42ea-acae-2410cbcf3a47 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 957.686341] env[69328]: DEBUG nova.network.neutron [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Updating instance_info_cache with network_info: [{"id": "dce2dda5-86c0-4f69-a3c1-c0f7a5c3b56e", "address": "fa:16:3e:db:a0:8f", "network": {"id": "77f17547-8c62-4a31-9840-8d2cbb1bfbab", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-163692077-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bad0df17bba4bc996fe5cf1faf23fad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdce2dda5-86", "ovs_interfaceid": "dce2dda5-86c0-4f69-a3c1-c0f7a5c3b56e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.718061] env[69328]: DEBUG oslo_vmware.api [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273647, 'name': Rename_Task, 'duration_secs': 0.15677} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.718364] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 957.718640] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-463d314a-fa88-4fa1-a495-a16daa04a7b1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.724714] env[69328]: DEBUG oslo_vmware.api [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 957.724714] env[69328]: value = "task-3273648" [ 957.724714] env[69328]: _type = "Task" [ 957.724714] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.735542] env[69328]: DEBUG oslo_vmware.api [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273648, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.814772] env[69328]: DEBUG nova.compute.manager [req-d9cf09cc-f969-48a2-875e-4d90a6a73755 req-c3ccbdf7-931c-4003-a824-0906b1a616fd service nova] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Received event network-changed-d97e62a9-59f8-4f3b-9296-f5a0803d2b10 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 957.814772] env[69328]: DEBUG nova.compute.manager [req-d9cf09cc-f969-48a2-875e-4d90a6a73755 req-c3ccbdf7-931c-4003-a824-0906b1a616fd service nova] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Refreshing instance network info cache due to event network-changed-d97e62a9-59f8-4f3b-9296-f5a0803d2b10. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 957.814924] env[69328]: DEBUG oslo_concurrency.lockutils [req-d9cf09cc-f969-48a2-875e-4d90a6a73755 req-c3ccbdf7-931c-4003-a824-0906b1a616fd service nova] Acquiring lock "refresh_cache-96f604a9-e42c-4aa8-b5b5-edcb34901d94" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.815258] env[69328]: DEBUG oslo_concurrency.lockutils [req-d9cf09cc-f969-48a2-875e-4d90a6a73755 req-c3ccbdf7-931c-4003-a824-0906b1a616fd service nova] Acquired lock "refresh_cache-96f604a9-e42c-4aa8-b5b5-edcb34901d94" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 957.815460] env[69328]: DEBUG nova.network.neutron [req-d9cf09cc-f969-48a2-875e-4d90a6a73755 req-c3ccbdf7-931c-4003-a824-0906b1a616fd service nova] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Refreshing network info cache for port d97e62a9-59f8-4f3b-9296-f5a0803d2b10 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 957.839473] env[69328]: DEBUG oslo_vmware.api [None req-79c9cca0-5b05-40b2-89e9-7ac130644c6f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3273645, 'name': PowerOnVM_Task, 'duration_secs': 1.400777} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.839768] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-79c9cca0-5b05-40b2-89e9-7ac130644c6f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 957.839966] env[69328]: DEBUG nova.compute.manager [None req-79c9cca0-5b05-40b2-89e9-7ac130644c6f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 957.840759] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ef1c385-6781-4ab4-b7e2-cb6dbe940654 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.964158] env[69328]: DEBUG oslo_concurrency.lockutils [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.461s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 957.964382] env[69328]: DEBUG nova.compute.manager [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 957.967435] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d76b6475-e53b-41ad-a9cf-58f1a146b704 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.333s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 957.967695] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d76b6475-e53b-41ad-a9cf-58f1a146b704 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 957.971049] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aa421e2c-9c56-4abb-8b44-da66e527c182 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.807s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 957.971049] env[69328]: DEBUG nova.objects.instance [None req-aa421e2c-9c56-4abb-8b44-da66e527c182 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Lazy-loading 'resources' on Instance uuid 82e27131-b401-4885-83fb-825e5c8e2444 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 957.992517] env[69328]: INFO nova.scheduler.client.report [None req-d76b6475-e53b-41ad-a9cf-58f1a146b704 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Deleted allocations for instance fd72bae3-cb72-48d0-a0df-9ea3a770a86c [ 958.051397] env[69328]: DEBUG oslo_vmware.api [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e606b3-0343-1346-afc9-875dd5c481cd, 'name': SearchDatastore_Task, 'duration_secs': 0.011597} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.052548] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 958.052801] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 958.053176] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.053475] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 958.053537] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 958.055211] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-33bb65cb-3997-4338-9a4c-4868359db5ae {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.064781] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 958.064966] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 958.065818] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7523eed3-87eb-48b0-bf0d-b095cd40a04b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.071649] env[69328]: DEBUG oslo_vmware.api [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 958.071649] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d343d6-9368-9b51-63eb-14e74775348b" [ 958.071649] env[69328]: _type = "Task" [ 958.071649] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.080701] env[69328]: DEBUG oslo_vmware.api [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d343d6-9368-9b51-63eb-14e74775348b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.150548] env[69328]: DEBUG oslo_concurrency.lockutils [None req-73b796c4-34b6-4b8d-8503-ae2b131f9fce tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "refresh_cache-36f6aab5-2774-402b-9db6-9912f2d5d473" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.150548] env[69328]: DEBUG oslo_concurrency.lockutils [None req-73b796c4-34b6-4b8d-8503-ae2b131f9fce tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquired lock "refresh_cache-36f6aab5-2774-402b-9db6-9912f2d5d473" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 958.150955] env[69328]: DEBUG nova.network.neutron [None req-73b796c4-34b6-4b8d-8503-ae2b131f9fce tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 958.189778] env[69328]: DEBUG oslo_concurrency.lockutils [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Releasing lock "refresh_cache-65e38a02-880b-46e2-8866-645a9fc17c7a" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 958.190127] env[69328]: DEBUG nova.compute.manager [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Instance network_info: |[{"id": "dce2dda5-86c0-4f69-a3c1-c0f7a5c3b56e", "address": "fa:16:3e:db:a0:8f", "network": {"id": "77f17547-8c62-4a31-9840-8d2cbb1bfbab", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-163692077-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": 
{}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bad0df17bba4bc996fe5cf1faf23fad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdce2dda5-86", "ovs_interfaceid": "dce2dda5-86c0-4f69-a3c1-c0f7a5c3b56e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 958.190548] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:db:a0:8f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '62237242-7ce2-4664-a1c5-6783b516b507', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dce2dda5-86c0-4f69-a3c1-c0f7a5c3b56e', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 958.201168] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Creating folder: Project (6bad0df17bba4bc996fe5cf1faf23fad). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 958.201815] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4468696a-3595-4d14-87e1-bcbc638882c0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.214085] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Created folder: Project (6bad0df17bba4bc996fe5cf1faf23fad) in parent group-v653649. [ 958.214317] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Creating folder: Instances. Parent ref: group-v653872. 
{{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 958.214571] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0dd23eef-0fc9-4fa2-ae01-fc611e569595 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.222994] env[69328]: DEBUG nova.compute.manager [req-060169de-3f06-4db6-ad44-c2fbaf064568 req-ef335c6e-a671-4d7c-ac93-2a8e93c97f86 service nova] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Received event network-vif-plugged-dce2dda5-86c0-4f69-a3c1-c0f7a5c3b56e {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 958.223739] env[69328]: DEBUG oslo_concurrency.lockutils [req-060169de-3f06-4db6-ad44-c2fbaf064568 req-ef335c6e-a671-4d7c-ac93-2a8e93c97f86 service nova] Acquiring lock "65e38a02-880b-46e2-8866-645a9fc17c7a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 958.223739] env[69328]: DEBUG oslo_concurrency.lockutils [req-060169de-3f06-4db6-ad44-c2fbaf064568 req-ef335c6e-a671-4d7c-ac93-2a8e93c97f86 service nova] Lock "65e38a02-880b-46e2-8866-645a9fc17c7a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 958.223739] env[69328]: DEBUG oslo_concurrency.lockutils [req-060169de-3f06-4db6-ad44-c2fbaf064568 req-ef335c6e-a671-4d7c-ac93-2a8e93c97f86 service nova] Lock "65e38a02-880b-46e2-8866-645a9fc17c7a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 958.224023] env[69328]: DEBUG nova.compute.manager [req-060169de-3f06-4db6-ad44-c2fbaf064568 req-ef335c6e-a671-4d7c-ac93-2a8e93c97f86 service nova] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] No waiting events found dispatching network-vif-plugged-dce2dda5-86c0-4f69-a3c1-c0f7a5c3b56e {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 958.224023] env[69328]: WARNING nova.compute.manager [req-060169de-3f06-4db6-ad44-c2fbaf064568 req-ef335c6e-a671-4d7c-ac93-2a8e93c97f86 service nova] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Received unexpected event network-vif-plugged-dce2dda5-86c0-4f69-a3c1-c0f7a5c3b56e for instance with vm_state building and task_state spawning. [ 958.224762] env[69328]: DEBUG nova.compute.manager [req-060169de-3f06-4db6-ad44-c2fbaf064568 req-ef335c6e-a671-4d7c-ac93-2a8e93c97f86 service nova] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Received event network-changed-dce2dda5-86c0-4f69-a3c1-c0f7a5c3b56e {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 958.224762] env[69328]: DEBUG nova.compute.manager [req-060169de-3f06-4db6-ad44-c2fbaf064568 req-ef335c6e-a671-4d7c-ac93-2a8e93c97f86 service nova] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Refreshing instance network info cache due to event network-changed-dce2dda5-86c0-4f69-a3c1-c0f7a5c3b56e. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 958.224762] env[69328]: DEBUG oslo_concurrency.lockutils [req-060169de-3f06-4db6-ad44-c2fbaf064568 req-ef335c6e-a671-4d7c-ac93-2a8e93c97f86 service nova] Acquiring lock "refresh_cache-65e38a02-880b-46e2-8866-645a9fc17c7a" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.224762] env[69328]: DEBUG oslo_concurrency.lockutils [req-060169de-3f06-4db6-ad44-c2fbaf064568 req-ef335c6e-a671-4d7c-ac93-2a8e93c97f86 service nova] Acquired lock "refresh_cache-65e38a02-880b-46e2-8866-645a9fc17c7a" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 958.224762] env[69328]: DEBUG nova.network.neutron [req-060169de-3f06-4db6-ad44-c2fbaf064568 req-ef335c6e-a671-4d7c-ac93-2a8e93c97f86 service nova] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Refreshing network info cache for port dce2dda5-86c0-4f69-a3c1-c0f7a5c3b56e {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 958.227324] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Created folder: Instances in parent group-v653872. [ 958.227549] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 958.230779] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 958.231482] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fd83af54-0854-4890-be62-03903982fddd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.247889] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9f77844c-f748-4d64-ad38-fcb4c51d97ec tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "3ba646e8-a5c8-4917-a1c4-32b37affb598" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 958.248158] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9f77844c-f748-4d64-ad38-fcb4c51d97ec tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "3ba646e8-a5c8-4917-a1c4-32b37affb598" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 958.248379] env[69328]: INFO nova.compute.manager [None req-9f77844c-f748-4d64-ad38-fcb4c51d97ec tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Attaching volume a39fd325-7c9d-4482-b7a4-43b28bf52e5c to /dev/sdb [ 958.254345] env[69328]: DEBUG oslo_vmware.api [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb 
tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273648, 'name': PowerOnVM_Task, 'duration_secs': 0.461115} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.256713] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 958.256883] env[69328]: INFO nova.compute.manager [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Took 7.99 seconds to spawn the instance on the hypervisor. [ 958.257061] env[69328]: DEBUG nova.compute.manager [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 958.258362] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ff94e3b-5775-42e0-ac37-11119c1429f6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.262197] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 958.262197] env[69328]: value = "task-3273651" [ 958.262197] env[69328]: _type = "Task" [ 958.262197] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.272930] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273651, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.284166] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c84c3919-790b-47a0-bce1-dd1d553d50e2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.292666] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcebc994-5d2a-4d1d-a34c-4e95105e9b83 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.309102] env[69328]: DEBUG nova.virt.block_device [None req-9f77844c-f748-4d64-ad38-fcb4c51d97ec tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Updating existing volume attachment record: 4702adf2-5d53-4080-8084-717a6d6aa6ea {{(pid=69328) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 958.471289] env[69328]: DEBUG nova.compute.utils [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 958.471289] env[69328]: DEBUG nova.compute.manager [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 958.471289] env[69328]: DEBUG nova.network.neutron [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 958.503664] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d76b6475-e53b-41ad-a9cf-58f1a146b704 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "fd72bae3-cb72-48d0-a0df-9ea3a770a86c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.796s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 958.588100] env[69328]: DEBUG oslo_vmware.api [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d343d6-9368-9b51-63eb-14e74775348b, 'name': SearchDatastore_Task, 'duration_secs': 0.012593} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.589712] env[69328]: DEBUG nova.policy [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0ca24c1b09374feeaec13dfeeaf02d94', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6bad0df17bba4bc996fe5cf1faf23fad', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 958.598418] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-118d8a45-0358-4edd-a98f-e2624d5cd020 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.605144] env[69328]: DEBUG oslo_vmware.api [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 958.605144] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5240623a-f725-66c6-f071-50573276ae46" [ 958.605144] env[69328]: _type = "Task" [ 958.605144] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.613925] env[69328]: DEBUG oslo_vmware.api [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5240623a-f725-66c6-f071-50573276ae46, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.709737] env[69328]: WARNING nova.network.neutron [None req-73b796c4-34b6-4b8d-8503-ae2b131f9fce tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] aed15283-4a79-4e99-8b6c-49cf754138de already exists in list: networks containing: ['aed15283-4a79-4e99-8b6c-49cf754138de']. ignoring it [ 958.753193] env[69328]: DEBUG nova.network.neutron [req-d9cf09cc-f969-48a2-875e-4d90a6a73755 req-c3ccbdf7-931c-4003-a824-0906b1a616fd service nova] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Updated VIF entry in instance network info cache for port d97e62a9-59f8-4f3b-9296-f5a0803d2b10. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 958.753193] env[69328]: DEBUG nova.network.neutron [req-d9cf09cc-f969-48a2-875e-4d90a6a73755 req-c3ccbdf7-931c-4003-a824-0906b1a616fd service nova] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Updating instance_info_cache with network_info: [{"id": "d97e62a9-59f8-4f3b-9296-f5a0803d2b10", "address": "fa:16:3e:81:01:fa", "network": {"id": "e2ab6957-2c9d-4b95-91bd-5a62d9a284ba", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-967470666-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75d5853e3c724d02bacfa75173e38ab3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd97e62a9-59", "ovs_interfaceid": "d97e62a9-59f8-4f3b-9296-f5a0803d2b10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.789533] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273651, 'name': CreateVM_Task, 'duration_secs': 0.377621} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.790142] env[69328]: INFO nova.compute.manager [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Took 16.80 seconds to build instance. 
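The SearchDatastore_Task, CreateVM_Task, CopyVirtualDisk_Task and ReconfigVM_Task entries above and below all follow the same pattern: the vmwareapi driver invokes a vSphere task, then oslo_vmware's wait_for_task loops in _poll_task, logging "progress is N%" until the task reports success (or raising on error). The snippet below is a minimal, self-contained sketch of that poll-until-done pattern only; FakeTask, POLL_INTERVAL and the printed messages are illustrative stand-ins and not the oslo.vmware implementation or its API.

# Illustrative sketch: emulates the loop behind the "_poll_task ... progress is N%"
# and "completed successfully" lines. FakeTask/POLL_INTERVAL are hypothetical.
import time

POLL_INTERVAL = 0.5  # seconds between polls (nova reads this from task_poll_interval)


class FakeTask:
    """Stand-in for a vSphere task whose state/progress gets polled."""

    def __init__(self, name, ticks=3):
        self.name = name
        self._ticks = ticks
        self._seen = 0

    def info(self):
        # Each call advances the fake task; the real driver re-reads task.info.
        self._seen += 1
        if self._seen >= self._ticks:
            return {"state": "success", "progress": 100}
        return {"state": "running", "progress": int(100 * self._seen / self._ticks)}


def wait_for_task(task):
    """Poll the task until it reaches a terminal state, logging progress."""
    while True:
        info = task.info()
        if info["state"] == "success":
            print(f"Task {task.name!r} completed successfully.")
            return info
        if info["state"] == "error":
            raise RuntimeError(f"Task {task.name!r} failed")
        print(f"Task {task.name!r} progress is {info['progress']}%.")
        time.sleep(POLL_INTERVAL)


if __name__ == "__main__":
    wait_for_task(FakeTask("CopyVirtualDisk_Task"))

In the log itself this shows up as a Waiting for the task line, zero or more progress lines, and a final "completed successfully" record carrying duration_secs; the sketch mirrors that sequence but nothing more.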
[ 958.791139] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 958.792715] env[69328]: DEBUG oslo_concurrency.lockutils [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.792715] env[69328]: DEBUG oslo_concurrency.lockutils [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 958.792715] env[69328]: DEBUG oslo_concurrency.lockutils [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 958.793907] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48c36a89-1024-4fc0-9edf-376385fde35e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.802543] env[69328]: DEBUG oslo_vmware.api [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 958.802543] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52dfda1a-add1-6500-d2a0-951073e613f6" [ 958.802543] env[69328]: _type = "Task" [ 958.802543] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.813106] env[69328]: DEBUG oslo_vmware.api [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52dfda1a-add1-6500-d2a0-951073e613f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.982440] env[69328]: DEBUG nova.compute.manager [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Start building block device mappings for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 959.015290] env[69328]: DEBUG nova.network.neutron [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Successfully created port: 9fa28c73-cefa-44f5-a043-9e6ce86838c0 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 959.025840] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad5322d1-c829-4525-bbc0-7a9946b18412 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.037748] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-447b9d4a-d8c8-4038-95d7-b3e1d6e4959d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.081021] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d013f58e-7938-4d1a-a713-fa1c3973918a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.089802] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4d691b1-d849-4959-883e-ba8d435f1dbd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.105708] env[69328]: DEBUG nova.compute.provider_tree [None req-aa421e2c-9c56-4abb-8b44-da66e527c182 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 959.117975] env[69328]: DEBUG oslo_vmware.api [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5240623a-f725-66c6-f071-50573276ae46, 'name': SearchDatastore_Task, 'duration_secs': 0.010772} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.118244] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 959.118498] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 96f604a9-e42c-4aa8-b5b5-edcb34901d94/96f604a9-e42c-4aa8-b5b5-edcb34901d94.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 959.118754] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ed7331e7-3673-4ceb-89d7-86cf9f4790b0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.126299] env[69328]: DEBUG oslo_vmware.api [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 959.126299] env[69328]: value = "task-3273655" [ 959.126299] env[69328]: _type = "Task" [ 959.126299] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.138218] env[69328]: DEBUG oslo_vmware.api [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273655, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.141158] env[69328]: DEBUG nova.network.neutron [req-060169de-3f06-4db6-ad44-c2fbaf064568 req-ef335c6e-a671-4d7c-ac93-2a8e93c97f86 service nova] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Updated VIF entry in instance network info cache for port dce2dda5-86c0-4f69-a3c1-c0f7a5c3b56e. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 959.141520] env[69328]: DEBUG nova.network.neutron [req-060169de-3f06-4db6-ad44-c2fbaf064568 req-ef335c6e-a671-4d7c-ac93-2a8e93c97f86 service nova] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Updating instance_info_cache with network_info: [{"id": "dce2dda5-86c0-4f69-a3c1-c0f7a5c3b56e", "address": "fa:16:3e:db:a0:8f", "network": {"id": "77f17547-8c62-4a31-9840-8d2cbb1bfbab", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-163692077-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bad0df17bba4bc996fe5cf1faf23fad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdce2dda5-86", "ovs_interfaceid": "dce2dda5-86c0-4f69-a3c1-c0f7a5c3b56e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.200456] env[69328]: DEBUG nova.network.neutron [None req-73b796c4-34b6-4b8d-8503-ae2b131f9fce tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Updating instance_info_cache with network_info: [{"id": "766304d2-5559-4007-9fa4-a01027d56e49", "address": "fa:16:3e:7b:c7:c8", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap766304d2-55", "ovs_interfaceid": "766304d2-5559-4007-9fa4-a01027d56e49", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "3cd046f6-ec54-42ea-acae-2410cbcf3a47", "address": "fa:16:3e:50:8c:2b", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cd046f6-ec", "ovs_interfaceid": "3cd046f6-ec54-42ea-acae-2410cbcf3a47", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.255681] env[69328]: DEBUG oslo_concurrency.lockutils [req-d9cf09cc-f969-48a2-875e-4d90a6a73755 req-c3ccbdf7-931c-4003-a824-0906b1a616fd service nova] Releasing lock "refresh_cache-96f604a9-e42c-4aa8-b5b5-edcb34901d94" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 959.255961] env[69328]: DEBUG nova.compute.manager [req-d9cf09cc-f969-48a2-875e-4d90a6a73755 req-c3ccbdf7-931c-4003-a824-0906b1a616fd service nova] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Received event network-vif-deleted-fbe60697-372d-45c9-97c0-49ce01cbc064 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 959.294026] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3c4cbf48-ebad-4501-a3b9-8b2be5413cbb tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "9ad2b2e3-460a-403e-bfc7-f46648c93849" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.311s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 959.313707] env[69328]: DEBUG oslo_vmware.api [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52dfda1a-add1-6500-d2a0-951073e613f6, 'name': SearchDatastore_Task, 'duration_secs': 0.015184} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.316706] env[69328]: DEBUG oslo_concurrency.lockutils [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 959.316706] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 959.316706] env[69328]: DEBUG oslo_concurrency.lockutils [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.316706] env[69328]: DEBUG oslo_concurrency.lockutils [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 959.316706] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 959.316706] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-86c921f1-b8ec-4e4f-83b9-6db559c61727 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.326027] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 959.326510] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 959.327844] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8d9522c-ef23-4e8d-a1b8-4d12916a06a2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.335742] env[69328]: DEBUG oslo_vmware.api [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 959.335742] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52302603-db83-f310-3179-e3c282549c52" [ 959.335742] env[69328]: _type = "Task" [ 959.335742] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.346727] env[69328]: DEBUG oslo_vmware.api [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52302603-db83-f310-3179-e3c282549c52, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.421484] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquiring lock "52c87371-4142-40d6-ac68-804aabd9f823" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 959.421484] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lock "52c87371-4142-40d6-ac68-804aabd9f823" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 959.615462] env[69328]: DEBUG nova.scheduler.client.report [None req-aa421e2c-9c56-4abb-8b44-da66e527c182 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 959.641496] env[69328]: DEBUG oslo_vmware.api [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273655, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.644591] env[69328]: DEBUG oslo_concurrency.lockutils [req-060169de-3f06-4db6-ad44-c2fbaf064568 req-ef335c6e-a671-4d7c-ac93-2a8e93c97f86 service nova] Releasing lock "refresh_cache-65e38a02-880b-46e2-8866-645a9fc17c7a" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 959.704087] env[69328]: DEBUG oslo_concurrency.lockutils [None req-73b796c4-34b6-4b8d-8503-ae2b131f9fce tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Releasing lock "refresh_cache-36f6aab5-2774-402b-9db6-9912f2d5d473" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 959.704812] env[69328]: DEBUG oslo_concurrency.lockutils [None req-73b796c4-34b6-4b8d-8503-ae2b131f9fce tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "36f6aab5-2774-402b-9db6-9912f2d5d473" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.704972] env[69328]: DEBUG oslo_concurrency.lockutils [None req-73b796c4-34b6-4b8d-8503-ae2b131f9fce tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquired lock "36f6aab5-2774-402b-9db6-9912f2d5d473" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 959.705861] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-492ebb2c-cd1c-44ff-8ffd-e90888077e7b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.727264] env[69328]: DEBUG nova.virt.hardware [None req-73b796c4-34b6-4b8d-8503-ae2b131f9fce tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 959.727525] env[69328]: DEBUG nova.virt.hardware [None req-73b796c4-34b6-4b8d-8503-ae2b131f9fce tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 959.727680] env[69328]: DEBUG nova.virt.hardware [None req-73b796c4-34b6-4b8d-8503-ae2b131f9fce tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 959.727862] env[69328]: DEBUG nova.virt.hardware [None req-73b796c4-34b6-4b8d-8503-ae2b131f9fce tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Flavor pref 0:0:0 {{(pid=69328) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 959.728026] env[69328]: DEBUG nova.virt.hardware [None req-73b796c4-34b6-4b8d-8503-ae2b131f9fce tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 959.728290] env[69328]: DEBUG nova.virt.hardware [None req-73b796c4-34b6-4b8d-8503-ae2b131f9fce tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 959.728557] env[69328]: DEBUG nova.virt.hardware [None req-73b796c4-34b6-4b8d-8503-ae2b131f9fce tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 959.728805] env[69328]: DEBUG nova.virt.hardware [None req-73b796c4-34b6-4b8d-8503-ae2b131f9fce tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 959.729112] env[69328]: DEBUG nova.virt.hardware [None req-73b796c4-34b6-4b8d-8503-ae2b131f9fce tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 959.729203] env[69328]: DEBUG nova.virt.hardware [None req-73b796c4-34b6-4b8d-8503-ae2b131f9fce tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 959.729371] env[69328]: DEBUG nova.virt.hardware [None req-73b796c4-34b6-4b8d-8503-ae2b131f9fce tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 959.736388] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-73b796c4-34b6-4b8d-8503-ae2b131f9fce tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Reconfiguring VM to attach interface {{(pid=69328) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 959.736780] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f121bb9-53c7-4431-8703-0316a92832ae {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.757040] env[69328]: DEBUG oslo_vmware.api [None req-73b796c4-34b6-4b8d-8503-ae2b131f9fce tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 959.757040] env[69328]: value = "task-3273656" [ 959.757040] env[69328]: _type = "Task" [ 959.757040] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.767021] env[69328]: DEBUG oslo_vmware.api [None req-73b796c4-34b6-4b8d-8503-ae2b131f9fce tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273656, 'name': ReconfigVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.847756] env[69328]: DEBUG oslo_vmware.api [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52302603-db83-f310-3179-e3c282549c52, 'name': SearchDatastore_Task, 'duration_secs': 0.011257} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.848590] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4977f03-966d-49ad-9178-f7fb449117f6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.855270] env[69328]: DEBUG oslo_vmware.api [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 959.855270] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]520948bc-c8fc-b108-1028-1133ba122be7" [ 959.855270] env[69328]: _type = "Task" [ 959.855270] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.868459] env[69328]: DEBUG oslo_vmware.api [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]520948bc-c8fc-b108-1028-1133ba122be7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.923972] env[69328]: DEBUG nova.compute.manager [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 959.995407] env[69328]: DEBUG nova.compute.manager [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 960.025649] env[69328]: DEBUG nova.virt.hardware [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 960.025908] env[69328]: DEBUG nova.virt.hardware [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 960.026081] env[69328]: DEBUG nova.virt.hardware [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 960.026433] env[69328]: DEBUG nova.virt.hardware [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 960.026572] env[69328]: DEBUG nova.virt.hardware [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 960.026721] env[69328]: DEBUG nova.virt.hardware [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 960.026961] env[69328]: DEBUG nova.virt.hardware [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 960.027111] env[69328]: DEBUG nova.virt.hardware [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 960.027278] env[69328]: DEBUG nova.virt.hardware [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 
tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 960.027442] env[69328]: DEBUG nova.virt.hardware [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 960.027614] env[69328]: DEBUG nova.virt.hardware [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 960.028543] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fa72f87-df9f-4bfa-8241-7c7b22a857e1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.037396] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebea36fb-3d2e-4014-b0e7-ef8adc2c2594 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.124392] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aa421e2c-9c56-4abb-8b44-da66e527c182 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.154s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 960.126172] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "51a9c492-6f91-4186-b550-ef12284b8a84" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 960.126397] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "51a9c492-6f91-4186-b550-ef12284b8a84" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 960.127679] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.642s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 960.129383] env[69328]: INFO nova.compute.claims [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Claim successful on node 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 960.143127] env[69328]: DEBUG oslo_vmware.api [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273655, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.60905} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.143688] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 96f604a9-e42c-4aa8-b5b5-edcb34901d94/96f604a9-e42c-4aa8-b5b5-edcb34901d94.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 960.143688] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 960.144056] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2a574600-e806-45f0-94ec-264290255cce {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.147868] env[69328]: INFO nova.scheduler.client.report [None req-aa421e2c-9c56-4abb-8b44-da66e527c182 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Deleted allocations for instance 82e27131-b401-4885-83fb-825e5c8e2444 [ 960.158893] env[69328]: DEBUG oslo_vmware.api [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 960.158893] env[69328]: value = "task-3273657" [ 960.158893] env[69328]: _type = "Task" [ 960.158893] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.169075] env[69328]: DEBUG oslo_vmware.api [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273657, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.270902] env[69328]: DEBUG oslo_vmware.api [None req-73b796c4-34b6-4b8d-8503-ae2b131f9fce tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273656, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.370846] env[69328]: DEBUG oslo_vmware.api [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]520948bc-c8fc-b108-1028-1133ba122be7, 'name': SearchDatastore_Task, 'duration_secs': 0.030647} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.371147] env[69328]: DEBUG oslo_concurrency.lockutils [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 960.371684] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 65e38a02-880b-46e2-8866-645a9fc17c7a/65e38a02-880b-46e2-8866-645a9fc17c7a.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 960.371840] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4721e1bd-dbb0-4be1-9f3a-052df9c44bed {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.379710] env[69328]: DEBUG oslo_vmware.api [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 960.379710] env[69328]: value = "task-3273658" [ 960.379710] env[69328]: _type = "Task" [ 960.379710] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.389088] env[69328]: DEBUG oslo_vmware.api [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273658, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.442411] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "b61436f5-0e8b-4da5-9459-cf9487dfd23f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 960.442727] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "b61436f5-0e8b-4da5-9459-cf9487dfd23f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 960.446007] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 960.632954] env[69328]: DEBUG nova.compute.manager [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 960.655853] env[69328]: DEBUG oslo_concurrency.lockutils [None req-aa421e2c-9c56-4abb-8b44-da66e527c182 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Lock "82e27131-b401-4885-83fb-825e5c8e2444" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.005s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 960.666657] env[69328]: DEBUG nova.compute.manager [req-8cfe81ce-8365-41e3-ac9e-5419db364ec8 req-c8475a32-7c49-4104-9014-01ee81fbbc65 service nova] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Received event network-vif-deleted-33b9c9a0-82a8-4195-bb5a-d8e6e911ddc5 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 960.671863] env[69328]: DEBUG oslo_vmware.api [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273657, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.126392} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.671863] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 960.672591] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccb23480-1761-4d94-af20-fb3918643e40 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.702357] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] 96f604a9-e42c-4aa8-b5b5-edcb34901d94/96f604a9-e42c-4aa8-b5b5-edcb34901d94.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 960.703299] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6e2da5ca-bf35-4beb-8ab6-cf6389b30874 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.722900] env[69328]: DEBUG nova.network.neutron [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Successfully updated port: 9fa28c73-cefa-44f5-a043-9e6ce86838c0 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 960.734164] env[69328]: DEBUG oslo_vmware.api [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 960.734164] env[69328]: value = "task-3273659" [ 960.734164] env[69328]: _type = "Task" [ 960.734164] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.745599] env[69328]: DEBUG oslo_vmware.api [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273659, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.769558] env[69328]: DEBUG oslo_vmware.api [None req-73b796c4-34b6-4b8d-8503-ae2b131f9fce tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273656, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.883429] env[69328]: DEBUG nova.compute.manager [req-ba5c2ba2-3dc4-41b9-94f1-d263235e349f req-bab72010-3045-4b93-86d9-0fd2a2dea5a4 service nova] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Received event network-vif-plugged-3cd046f6-ec54-42ea-acae-2410cbcf3a47 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 960.883730] env[69328]: DEBUG oslo_concurrency.lockutils [req-ba5c2ba2-3dc4-41b9-94f1-d263235e349f req-bab72010-3045-4b93-86d9-0fd2a2dea5a4 service nova] Acquiring lock "36f6aab5-2774-402b-9db6-9912f2d5d473-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 960.884018] env[69328]: DEBUG oslo_concurrency.lockutils [req-ba5c2ba2-3dc4-41b9-94f1-d263235e349f req-bab72010-3045-4b93-86d9-0fd2a2dea5a4 service nova] Lock "36f6aab5-2774-402b-9db6-9912f2d5d473-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 960.884253] env[69328]: DEBUG oslo_concurrency.lockutils [req-ba5c2ba2-3dc4-41b9-94f1-d263235e349f req-bab72010-3045-4b93-86d9-0fd2a2dea5a4 service nova] Lock "36f6aab5-2774-402b-9db6-9912f2d5d473-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 960.884481] env[69328]: DEBUG nova.compute.manager [req-ba5c2ba2-3dc4-41b9-94f1-d263235e349f req-bab72010-3045-4b93-86d9-0fd2a2dea5a4 service nova] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] No waiting events found dispatching network-vif-plugged-3cd046f6-ec54-42ea-acae-2410cbcf3a47 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 960.884751] env[69328]: WARNING nova.compute.manager [req-ba5c2ba2-3dc4-41b9-94f1-d263235e349f req-bab72010-3045-4b93-86d9-0fd2a2dea5a4 service nova] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Received unexpected event network-vif-plugged-3cd046f6-ec54-42ea-acae-2410cbcf3a47 for instance with vm_state active and task_state None. [ 960.884964] env[69328]: DEBUG nova.compute.manager [req-ba5c2ba2-3dc4-41b9-94f1-d263235e349f req-bab72010-3045-4b93-86d9-0fd2a2dea5a4 service nova] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Received event network-changed-3cd046f6-ec54-42ea-acae-2410cbcf3a47 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 960.885278] env[69328]: DEBUG nova.compute.manager [req-ba5c2ba2-3dc4-41b9-94f1-d263235e349f req-bab72010-3045-4b93-86d9-0fd2a2dea5a4 service nova] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Refreshing instance network info cache due to event network-changed-3cd046f6-ec54-42ea-acae-2410cbcf3a47. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 960.885484] env[69328]: DEBUG oslo_concurrency.lockutils [req-ba5c2ba2-3dc4-41b9-94f1-d263235e349f req-bab72010-3045-4b93-86d9-0fd2a2dea5a4 service nova] Acquiring lock "refresh_cache-36f6aab5-2774-402b-9db6-9912f2d5d473" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.885709] env[69328]: DEBUG oslo_concurrency.lockutils [req-ba5c2ba2-3dc4-41b9-94f1-d263235e349f req-bab72010-3045-4b93-86d9-0fd2a2dea5a4 service nova] Acquired lock "refresh_cache-36f6aab5-2774-402b-9db6-9912f2d5d473" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 960.885970] env[69328]: DEBUG nova.network.neutron [req-ba5c2ba2-3dc4-41b9-94f1-d263235e349f req-bab72010-3045-4b93-86d9-0fd2a2dea5a4 service nova] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Refreshing network info cache for port 3cd046f6-ec54-42ea-acae-2410cbcf3a47 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 960.901988] env[69328]: DEBUG oslo_vmware.api [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273658, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.948458] env[69328]: DEBUG nova.compute.manager [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 961.160912] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 961.226259] env[69328]: DEBUG oslo_concurrency.lockutils [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquiring lock "refresh_cache-a95d01cf-c26b-466c-a5b6-a7e43f0321fa" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.226439] env[69328]: DEBUG oslo_concurrency.lockutils [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquired lock "refresh_cache-a95d01cf-c26b-466c-a5b6-a7e43f0321fa" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 961.226619] env[69328]: DEBUG nova.network.neutron [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 961.244717] env[69328]: DEBUG oslo_vmware.api [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273659, 
'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.269960] env[69328]: DEBUG oslo_vmware.api [None req-73b796c4-34b6-4b8d-8503-ae2b131f9fce tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273656, 'name': ReconfigVM_Task, 'duration_secs': 1.227279} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.272883] env[69328]: DEBUG oslo_concurrency.lockutils [None req-73b796c4-34b6-4b8d-8503-ae2b131f9fce tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Releasing lock "36f6aab5-2774-402b-9db6-9912f2d5d473" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 961.273129] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-73b796c4-34b6-4b8d-8503-ae2b131f9fce tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Reconfigured VM to attach interface {{(pid=69328) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 961.397418] env[69328]: DEBUG oslo_vmware.api [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273658, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.759012} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.397761] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 65e38a02-880b-46e2-8866-645a9fc17c7a/65e38a02-880b-46e2-8866-645a9fc17c7a.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 961.398057] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 961.398661] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c93698dc-e84c-4669-b5d4-05e7fca43011 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.406983] env[69328]: DEBUG oslo_vmware.api [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 961.406983] env[69328]: value = "task-3273661" [ 961.406983] env[69328]: _type = "Task" [ 961.406983] env[69328]: } to complete. 
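The "Task: {...} progress is N%" and "completed successfully" entries in this stretch come from a loop that repeatedly polls a vCenter task until it finishes. A simplified, self-contained version of that wait-for-task pattern is sketched below; fetch_state is a placeholder for whatever actually queries the task, and the demo iterator merely simulates one.

    # Generic poll-until-done loop in the spirit of the wait_for_task entries above.
    # Standard library only; the real code polls the vCenter task through oslo.vmware.
    import time


    def wait_for_task(fetch_state, interval=0.5, timeout=300.0):
        """Poll fetch_state() until it reports success, error, or timeout.

        fetch_state() must return a dict like {'state': 'running', 'progress': 25}
        whose state eventually becomes 'success' or 'error'.
        """
        start = time.monotonic()
        while True:
            info = fetch_state()
            if info['state'] == 'success':
                return time.monotonic() - start        # duration_secs, as logged
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            if time.monotonic() - start > timeout:
                raise TimeoutError('task did not complete in time')
            print(f"progress is {info.get('progress', 0)}%")
            time.sleep(interval)


    # Tiny demo standing in for a CopyVirtualDisk/ExtendVirtualDisk-style task.
    _states = iter([{'state': 'running', 'progress': 25},
                    {'state': 'running', 'progress': 80},
                    {'state': 'success'}])
    print('duration_secs:', round(wait_for_task(lambda: next(_states), interval=0.01), 3))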
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.419268] env[69328]: DEBUG oslo_vmware.api [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273661, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.459838] env[69328]: DEBUG oslo_concurrency.lockutils [None req-be4368cc-235b-4293-8984-4bd2a2f178dd tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "9ad2b2e3-460a-403e-bfc7-f46648c93849" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 961.460285] env[69328]: DEBUG oslo_concurrency.lockutils [None req-be4368cc-235b-4293-8984-4bd2a2f178dd tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "9ad2b2e3-460a-403e-bfc7-f46648c93849" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 961.460686] env[69328]: DEBUG oslo_concurrency.lockutils [None req-be4368cc-235b-4293-8984-4bd2a2f178dd tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "9ad2b2e3-460a-403e-bfc7-f46648c93849-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 961.462039] env[69328]: DEBUG oslo_concurrency.lockutils [None req-be4368cc-235b-4293-8984-4bd2a2f178dd tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "9ad2b2e3-460a-403e-bfc7-f46648c93849-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 961.462039] env[69328]: DEBUG oslo_concurrency.lockutils [None req-be4368cc-235b-4293-8984-4bd2a2f178dd tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "9ad2b2e3-460a-403e-bfc7-f46648c93849-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 961.466951] env[69328]: INFO nova.compute.manager [None req-be4368cc-235b-4293-8984-4bd2a2f178dd tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Terminating instance [ 961.474846] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 961.514514] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-016bf89f-4342-491b-bdbe-f54b452a962e {{(pid=69328) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.525169] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a4a3eee-5478-4c80-9355-2701a7ee7193 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.562454] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8883a21b-a2fc-4472-bb7e-00f91e9a3318 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.571812] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-602d8bf8-dee3-49dc-8b0d-6b72d431bc02 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.588969] env[69328]: DEBUG nova.compute.provider_tree [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 961.708028] env[69328]: DEBUG nova.network.neutron [req-ba5c2ba2-3dc4-41b9-94f1-d263235e349f req-bab72010-3045-4b93-86d9-0fd2a2dea5a4 service nova] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Updated VIF entry in instance network info cache for port 3cd046f6-ec54-42ea-acae-2410cbcf3a47. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 961.708643] env[69328]: DEBUG nova.network.neutron [req-ba5c2ba2-3dc4-41b9-94f1-d263235e349f req-bab72010-3045-4b93-86d9-0fd2a2dea5a4 service nova] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Updating instance_info_cache with network_info: [{"id": "766304d2-5559-4007-9fa4-a01027d56e49", "address": "fa:16:3e:7b:c7:c8", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap766304d2-55", "ovs_interfaceid": "766304d2-5559-4007-9fa4-a01027d56e49", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "3cd046f6-ec54-42ea-acae-2410cbcf3a47", "address": "fa:16:3e:50:8c:2b", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": 
[{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cd046f6-ec", "ovs_interfaceid": "3cd046f6-ec54-42ea-acae-2410cbcf3a47", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 961.745591] env[69328]: DEBUG oslo_vmware.api [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273659, 'name': ReconfigVM_Task, 'duration_secs': 0.650003} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.745958] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Reconfigured VM instance instance-00000051 to attach disk [datastore2] 96f604a9-e42c-4aa8-b5b5-edcb34901d94/96f604a9-e42c-4aa8-b5b5-edcb34901d94.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 961.746714] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7e6dc8aa-dda4-417b-952f-d78068b2b511 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.754653] env[69328]: DEBUG oslo_vmware.api [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 961.754653] env[69328]: value = "task-3273662" [ 961.754653] env[69328]: _type = "Task" [ 961.754653] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.764019] env[69328]: DEBUG oslo_vmware.api [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273662, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.774331] env[69328]: DEBUG nova.network.neutron [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Instance cache missing network info. 
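The cache refresh above ends by writing an updated VIF entry for port 3cd046f6-ec54-42ea-acae-2410cbcf3a47 back into the instance's network info cache. A small sketch of that merge step, assuming the cache is simply a list of VIF dicts shaped like the JSON above (Nova wraps these in model objects):

    # Replace-or-append a VIF entry in a cached network_info list, keyed by port id,
    # as the "Updated VIF entry in instance network info cache" step above does.
    def update_vif_entry(network_info, refreshed_vif):
        port_id = refreshed_vif["id"]
        for index, vif in enumerate(network_info):
            if vif["id"] == port_id:
                network_info[index] = refreshed_vif   # refresh the existing entry
                return network_info
        network_info.append(refreshed_vif)            # port not cached yet: add it
        return network_info


    cache = [{"id": "766304d2-5559-4007-9fa4-a01027d56e49", "active": True}]
    update_vif_entry(cache, {"id": "3cd046f6-ec54-42ea-acae-2410cbcf3a47", "active": True})
    print([vif["id"] for vif in cache])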
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 961.777168] env[69328]: DEBUG oslo_concurrency.lockutils [None req-73b796c4-34b6-4b8d-8503-ae2b131f9fce tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "interface-36f6aab5-2774-402b-9db6-9912f2d5d473-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.588s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 961.917436] env[69328]: DEBUG oslo_vmware.api [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273661, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.190111} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.917724] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 961.918532] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9cbee73-22dd-4e1a-acd9-b620fd4ad392 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.945564] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] 65e38a02-880b-46e2-8866-645a9fc17c7a/65e38a02-880b-46e2-8866-645a9fc17c7a.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 961.945564] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7148d22b-f922-4461-81a3-12baca79e073 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.964104] env[69328]: DEBUG oslo_vmware.api [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 961.964104] env[69328]: value = "task-3273663" [ 961.964104] env[69328]: _type = "Task" [ 961.964104] env[69328]: } to complete. 
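The lockutils entries report two durations for every named lock: how long the caller waited to acquire it and how long it was held (the attach_interface lock above was held 7.588s). A standard-library context manager that produces the same kind of accounting, purely as an illustration of the pattern rather than oslo.concurrency itself:

    # Context manager reporting acquire-wait and hold times for a named lock,
    # in the style of the "waited N.NNNs" / "held N.NNNs" entries above.
    import threading
    import time
    from contextlib import contextmanager

    _locks = {}
    _registry_lock = threading.Lock()


    def _named_lock(name):
        with _registry_lock:
            return _locks.setdefault(name, threading.Lock())


    @contextmanager
    def timed_lock(name, by):
        lock = _named_lock(name)
        t0 = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - t0
        print(f'Lock "{name}" acquired by "{by}" :: waited {waited:.3f}s')
        try:
            yield
        finally:
            held = time.monotonic() - t0 - waited
            lock.release()
            print(f'Lock "{name}" "released" by "{by}" :: held {held:.3f}s')


    with timed_lock("compute_resources", "instance_claim"):
        time.sleep(0.01)   # stand-in for the work done while the lock is held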
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.965212] env[69328]: DEBUG nova.network.neutron [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Updating instance_info_cache with network_info: [{"id": "9fa28c73-cefa-44f5-a043-9e6ce86838c0", "address": "fa:16:3e:c6:99:bc", "network": {"id": "77f17547-8c62-4a31-9840-8d2cbb1bfbab", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-163692077-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bad0df17bba4bc996fe5cf1faf23fad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9fa28c73-ce", "ovs_interfaceid": "9fa28c73-cefa-44f5-a043-9e6ce86838c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 961.972963] env[69328]: DEBUG nova.compute.manager [None req-be4368cc-235b-4293-8984-4bd2a2f178dd tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 961.973267] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-be4368cc-235b-4293-8984-4bd2a2f178dd tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 961.977207] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fba2b5c-35c9-4053-a6f7-086377cec176 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.980017] env[69328]: DEBUG oslo_vmware.api [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273663, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.985315] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-be4368cc-235b-4293-8984-4bd2a2f178dd tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 961.985572] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9ffc896a-6413-48f3-8028-3baa8bf75dbd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.994539] env[69328]: DEBUG oslo_vmware.api [None req-be4368cc-235b-4293-8984-4bd2a2f178dd tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 961.994539] env[69328]: value = "task-3273664" [ 961.994539] env[69328]: _type = "Task" [ 961.994539] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.006120] env[69328]: DEBUG oslo_vmware.api [None req-be4368cc-235b-4293-8984-4bd2a2f178dd tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273664, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.095091] env[69328]: DEBUG nova.scheduler.client.report [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 962.211125] env[69328]: DEBUG oslo_concurrency.lockutils [req-ba5c2ba2-3dc4-41b9-94f1-d263235e349f req-bab72010-3045-4b93-86d9-0fd2a2dea5a4 service nova] Releasing lock "refresh_cache-36f6aab5-2774-402b-9db6-9912f2d5d473" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 962.265775] env[69328]: DEBUG oslo_vmware.api [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273662, 'name': Rename_Task} progress is 99%. 
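The inventory record logged above ({'VCPU': ..., 'MEMORY_MB': ..., 'DISK_GB': ...}) is compared against the cached provider data, and nothing is sent to Placement when it has not changed. Below is a sketch of that change check, plus the common (total - reserved) * allocation_ratio reading of each record's schedulable capacity; the helper names are illustrative, not the Placement client code.

    # Change check for a resource provider's inventory, mirroring the
    # "Inventory has not changed for provider ..." decision above.
    current = {
        "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
                 "step_size": 1, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                      "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 115,
                    "step_size": 1, "allocation_ratio": 1.0},
    }


    def needs_inventory_update(cached, reported):
        """Only push an update when the reported inventory differs."""
        return cached != reported


    def capacity(record):
        """Schedulable capacity under the (total - reserved) * ratio rule."""
        return int((record["total"] - record["reserved"]) * record["allocation_ratio"])


    print(needs_inventory_update(current, dict(current)))          # False: nothing to send
    print({rc: capacity(rec) for rc, rec in current.items()})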
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.471890] env[69328]: DEBUG oslo_concurrency.lockutils [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Releasing lock "refresh_cache-a95d01cf-c26b-466c-a5b6-a7e43f0321fa" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 962.472246] env[69328]: DEBUG nova.compute.manager [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Instance network_info: |[{"id": "9fa28c73-cefa-44f5-a043-9e6ce86838c0", "address": "fa:16:3e:c6:99:bc", "network": {"id": "77f17547-8c62-4a31-9840-8d2cbb1bfbab", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-163692077-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bad0df17bba4bc996fe5cf1faf23fad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9fa28c73-ce", "ovs_interfaceid": "9fa28c73-cefa-44f5-a043-9e6ce86838c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 962.472670] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c6:99:bc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '62237242-7ce2-4664-a1c5-6783b516b507', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9fa28c73-cefa-44f5-a043-9e6ce86838c0', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 962.480829] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 962.484929] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 962.485279] env[69328]: DEBUG oslo_vmware.api [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273663, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.485521] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fa47d1f6-c7e1-40d2-aaed-cd60c11238e4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.511165] env[69328]: DEBUG oslo_vmware.api [None req-be4368cc-235b-4293-8984-4bd2a2f178dd tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273664, 'name': PowerOffVM_Task, 'duration_secs': 0.438995} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.512316] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-be4368cc-235b-4293-8984-4bd2a2f178dd tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 962.512533] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-be4368cc-235b-4293-8984-4bd2a2f178dd tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 962.512921] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 962.512921] env[69328]: value = "task-3273665" [ 962.512921] env[69328]: _type = "Task" [ 962.512921] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.513375] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f39e0234-c416-4242-be24-5b59451fc595 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.523834] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273665, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.600516] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.472s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 962.601438] env[69328]: DEBUG nova.compute.manager [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 962.607330] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.967s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 962.607597] env[69328]: DEBUG nova.objects.instance [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Lazy-loading 'resources' on Instance uuid 18022645-9a2a-489e-b0b1-486165f46f14 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 962.608767] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-be4368cc-235b-4293-8984-4bd2a2f178dd tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 962.608995] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-be4368cc-235b-4293-8984-4bd2a2f178dd tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 962.609197] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-be4368cc-235b-4293-8984-4bd2a2f178dd tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Deleting the datastore file [datastore2] 9ad2b2e3-460a-403e-bfc7-f46648c93849 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 962.609821] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f4f79baf-7ccc-478a-b348-c9b4edc97346 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.620956] env[69328]: DEBUG oslo_vmware.api [None req-be4368cc-235b-4293-8984-4bd2a2f178dd tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 962.620956] env[69328]: value = "task-3273667" [ 962.620956] env[69328]: _type = "Task" [ 962.620956] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.766341] env[69328]: DEBUG oslo_vmware.api [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273662, 'name': Rename_Task, 'duration_secs': 0.512974} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.766677] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 962.766853] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ddcabdad-c590-49ca-8f31-44b6138031be {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.774580] env[69328]: DEBUG oslo_vmware.api [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 962.774580] env[69328]: value = "task-3273668" [ 962.774580] env[69328]: _type = "Task" [ 962.774580] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.782432] env[69328]: DEBUG oslo_vmware.api [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273668, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.912860] env[69328]: DEBUG nova.compute.manager [req-a6463634-420a-4b6d-9f8c-1238ca2eb09b req-b5f7bc60-ba07-4003-aec9-4ddd4ac63c1e service nova] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Received event network-vif-plugged-9fa28c73-cefa-44f5-a043-9e6ce86838c0 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 962.913062] env[69328]: DEBUG oslo_concurrency.lockutils [req-a6463634-420a-4b6d-9f8c-1238ca2eb09b req-b5f7bc60-ba07-4003-aec9-4ddd4ac63c1e service nova] Acquiring lock "a95d01cf-c26b-466c-a5b6-a7e43f0321fa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 962.913278] env[69328]: DEBUG oslo_concurrency.lockutils [req-a6463634-420a-4b6d-9f8c-1238ca2eb09b req-b5f7bc60-ba07-4003-aec9-4ddd4ac63c1e service nova] Lock "a95d01cf-c26b-466c-a5b6-a7e43f0321fa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 962.913438] env[69328]: DEBUG oslo_concurrency.lockutils [req-a6463634-420a-4b6d-9f8c-1238ca2eb09b req-b5f7bc60-ba07-4003-aec9-4ddd4ac63c1e service nova] Lock "a95d01cf-c26b-466c-a5b6-a7e43f0321fa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 962.913603] env[69328]: DEBUG nova.compute.manager [req-a6463634-420a-4b6d-9f8c-1238ca2eb09b req-b5f7bc60-ba07-4003-aec9-4ddd4ac63c1e service nova] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] No waiting events found dispatching network-vif-plugged-9fa28c73-cefa-44f5-a043-9e6ce86838c0 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 962.913779] env[69328]: WARNING 
nova.compute.manager [req-a6463634-420a-4b6d-9f8c-1238ca2eb09b req-b5f7bc60-ba07-4003-aec9-4ddd4ac63c1e service nova] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Received unexpected event network-vif-plugged-9fa28c73-cefa-44f5-a043-9e6ce86838c0 for instance with vm_state building and task_state spawning. [ 962.913942] env[69328]: DEBUG nova.compute.manager [req-a6463634-420a-4b6d-9f8c-1238ca2eb09b req-b5f7bc60-ba07-4003-aec9-4ddd4ac63c1e service nova] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Received event network-changed-9fa28c73-cefa-44f5-a043-9e6ce86838c0 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 962.914106] env[69328]: DEBUG nova.compute.manager [req-a6463634-420a-4b6d-9f8c-1238ca2eb09b req-b5f7bc60-ba07-4003-aec9-4ddd4ac63c1e service nova] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Refreshing instance network info cache due to event network-changed-9fa28c73-cefa-44f5-a043-9e6ce86838c0. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 962.914287] env[69328]: DEBUG oslo_concurrency.lockutils [req-a6463634-420a-4b6d-9f8c-1238ca2eb09b req-b5f7bc60-ba07-4003-aec9-4ddd4ac63c1e service nova] Acquiring lock "refresh_cache-a95d01cf-c26b-466c-a5b6-a7e43f0321fa" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.914423] env[69328]: DEBUG oslo_concurrency.lockutils [req-a6463634-420a-4b6d-9f8c-1238ca2eb09b req-b5f7bc60-ba07-4003-aec9-4ddd4ac63c1e service nova] Acquired lock "refresh_cache-a95d01cf-c26b-466c-a5b6-a7e43f0321fa" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 962.914707] env[69328]: DEBUG nova.network.neutron [req-a6463634-420a-4b6d-9f8c-1238ca2eb09b req-b5f7bc60-ba07-4003-aec9-4ddd4ac63c1e service nova] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Refreshing network info cache for port 9fa28c73-cefa-44f5-a043-9e6ce86838c0 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 962.977304] env[69328]: DEBUG oslo_vmware.api [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273663, 'name': ReconfigVM_Task, 'duration_secs': 0.656394} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.977581] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Reconfigured VM instance instance-00000052 to attach disk [datastore2] 65e38a02-880b-46e2-8866-645a9fc17c7a/65e38a02-880b-46e2-8866-645a9fc17c7a.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 962.978217] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e9751a65-3751-46f8-9120-5f53dca380a8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.985249] env[69328]: DEBUG oslo_vmware.api [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 962.985249] env[69328]: value = "task-3273669" [ 962.985249] env[69328]: _type = "Task" [ 962.985249] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.993666] env[69328]: DEBUG oslo_vmware.api [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273669, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.025158] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273665, 'name': CreateVM_Task, 'duration_secs': 0.430122} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.025351] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 963.026091] env[69328]: DEBUG oslo_concurrency.lockutils [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.026265] env[69328]: DEBUG oslo_concurrency.lockutils [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 963.026596] env[69328]: DEBUG oslo_concurrency.lockutils [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 963.026857] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e3e3ded-a685-46f6-af59-1f50b53232d0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.032803] env[69328]: DEBUG oslo_vmware.api [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 963.032803] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]523fbf29-fe38-9662-0a8a-3d2bf4cff372" [ 963.032803] env[69328]: _type = "Task" [ 963.032803] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.042078] env[69328]: DEBUG oslo_vmware.api [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]523fbf29-fe38-9662-0a8a-3d2bf4cff372, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.117027] env[69328]: DEBUG nova.compute.utils [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 963.117027] env[69328]: DEBUG nova.compute.manager [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Allocating IP information in the background. 
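The "Using /dev/sd instead of None" entry above comes from picking the next free block device name when the request did not specify one. A simplified stand-in for that bookkeeping (single-letter suffixes only, which covers /dev/sda through /dev/sdz):

    # Pick the next free block device name under a given prefix.
    import string


    def next_device_name(used, prefix="/dev/sd"):
        for letter in string.ascii_lowercase:
            candidate = prefix + letter
            if candidate not in used:
                return candidate
        raise ValueError("no free device names under " + prefix)


    print(next_device_name({"/dev/sda"}))   # -> /dev/sdb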
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 963.117027] env[69328]: DEBUG nova.network.neutron [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 963.134675] env[69328]: DEBUG oslo_vmware.api [None req-be4368cc-235b-4293-8984-4bd2a2f178dd tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273667, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.261609} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.137025] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-be4368cc-235b-4293-8984-4bd2a2f178dd tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 963.137025] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-be4368cc-235b-4293-8984-4bd2a2f178dd tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 963.137025] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-be4368cc-235b-4293-8984-4bd2a2f178dd tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 963.137025] env[69328]: INFO nova.compute.manager [None req-be4368cc-235b-4293-8984-4bd2a2f178dd tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Took 1.16 seconds to destroy the instance on the hypervisor. [ 963.137688] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-be4368cc-235b-4293-8984-4bd2a2f178dd tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
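The destroy path above runs a fixed sequence: power off, unregister, delete the datastore contents, then hand network deallocation to a looping call that retries until it returns. The sketch below shows only that sequencing and retry logic; the step functions are placeholders, not the real driver calls.

    # Ordered teardown in the spirit of the destroy path above.
    import time


    def destroy_instance(power_off, unregister, delete_files, deallocate_network,
                         retries=3, delay=0.1):
        power_off()
        unregister()
        delete_files()
        for attempt in range(1, retries + 1):          # bounded retries for deallocation
            try:
                deallocate_network()
                return
            except Exception as exc:                   # illustrative catch-all
                if attempt == retries:
                    raise
                print(f"deallocate failed ({exc}); retrying in {delay}s")
                time.sleep(delay)


    destroy_instance(lambda: print("powered off"),
                     lambda: print("unregistered"),
                     lambda: print("datastore files deleted"),
                     lambda: print("network deallocated"))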
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 963.138678] env[69328]: DEBUG nova.compute.manager [-] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 963.138931] env[69328]: DEBUG nova.network.neutron [-] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 963.190200] env[69328]: DEBUG nova.policy [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0856ffa67f15460faff5615607ff3c57', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e9132cd9fbb84a24a9007c37e02261dd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 963.286145] env[69328]: DEBUG oslo_vmware.api [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273668, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.360131] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f77844c-f748-4d64-ad38-fcb4c51d97ec tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Volume attach. 
Driver type: vmdk {{(pid=69328) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 963.360131] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f77844c-f748-4d64-ad38-fcb4c51d97ec tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653876', 'volume_id': 'a39fd325-7c9d-4482-b7a4-43b28bf52e5c', 'name': 'volume-a39fd325-7c9d-4482-b7a4-43b28bf52e5c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3ba646e8-a5c8-4917-a1c4-32b37affb598', 'attached_at': '', 'detached_at': '', 'volume_id': 'a39fd325-7c9d-4482-b7a4-43b28bf52e5c', 'serial': 'a39fd325-7c9d-4482-b7a4-43b28bf52e5c'} {{(pid=69328) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 963.361083] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62edcb69-fdfb-4a87-ba37-1389745bbb40 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.381853] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9bbe922-7568-4acc-a81b-34f74eace002 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.413543] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f77844c-f748-4d64-ad38-fcb4c51d97ec tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Reconfiguring VM instance instance-00000046 to attach disk [datastore2] volume-a39fd325-7c9d-4482-b7a4-43b28bf52e5c/volume-a39fd325-7c9d-4482-b7a4-43b28bf52e5c.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 963.417257] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e111ac00-a5a6-44d5-9de8-0503143de3e1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.441165] env[69328]: DEBUG oslo_vmware.api [None req-9f77844c-f748-4d64-ad38-fcb4c51d97ec tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 963.441165] env[69328]: value = "task-3273670" [ 963.441165] env[69328]: _type = "Task" [ 963.441165] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.446948] env[69328]: DEBUG nova.compute.manager [req-a81476df-1650-4d88-b8f3-5c2b026a8edd req-11dc06c2-27a9-40ef-a00e-7d78bf16ee09 service nova] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Received event network-vif-deleted-322eba88-5363-41a8-a44d-50e0a7fdf92e {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 963.447368] env[69328]: INFO nova.compute.manager [req-a81476df-1650-4d88-b8f3-5c2b026a8edd req-11dc06c2-27a9-40ef-a00e-7d78bf16ee09 service nova] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Neutron deleted interface 322eba88-5363-41a8-a44d-50e0a7fdf92e; detaching it from the instance and deleting it from the info cache [ 963.447619] env[69328]: DEBUG nova.network.neutron [req-a81476df-1650-4d88-b8f3-5c2b026a8edd req-11dc06c2-27a9-40ef-a00e-7d78bf16ee09 service nova] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.458403] env[69328]: DEBUG oslo_vmware.api [None req-9f77844c-f748-4d64-ad38-fcb4c51d97ec tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273670, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.497577] env[69328]: DEBUG oslo_vmware.api [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273669, 'name': Rename_Task, 'duration_secs': 0.203298} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.501390] env[69328]: DEBUG nova.network.neutron [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Successfully created port: 3d148298-6666-4189-8b2b-3048f28a8bb7 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 963.503261] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 963.504042] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-60a36421-4459-4a29-8da9-f1c548020485 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.512038] env[69328]: DEBUG oslo_vmware.api [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 963.512038] env[69328]: value = "task-3273671" [ 963.512038] env[69328]: _type = "Task" [ 963.512038] env[69328]: } to complete. 
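Around the entries above, the datastore image cache is handled with a check-then-populate pattern: serialize on the cached image path, reuse the cached VMDK if the SearchDatastore step found it, otherwise create the devstack-image-cache_base directory and fetch the image. A local-filesystem sketch of that pattern; the paths and the fetch callable are stand-ins for the actual datastore operations.

    # Check-then-populate pattern for a shared image cache.
    import os
    import threading

    _cache_lock = threading.Lock()


    def ensure_cached(cache_dir, image_id, fetch):
        cached_path = os.path.join(cache_dir, image_id, f"{image_id}.vmdk")
        with _cache_lock:                       # one fetch at a time per cache
            if os.path.exists(cached_path):
                return cached_path              # already cached: reuse it
            os.makedirs(os.path.dirname(cached_path), exist_ok=True)
            fetch(cached_path)                  # populate the cache entry
        return cached_path


    path = ensure_cached("/tmp/devstack-image-cache_base",
                         "a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318",
                         lambda p: open(p, "wb").close())
    print(path)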
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.532863] env[69328]: DEBUG oslo_vmware.api [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273671, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.545997] env[69328]: DEBUG oslo_vmware.api [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]523fbf29-fe38-9662-0a8a-3d2bf4cff372, 'name': SearchDatastore_Task, 'duration_secs': 0.010644} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.548545] env[69328]: DEBUG oslo_concurrency.lockutils [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 963.549467] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 963.549467] env[69328]: DEBUG oslo_concurrency.lockutils [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.549467] env[69328]: DEBUG oslo_concurrency.lockutils [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 963.549467] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 963.549850] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-30ff3d79-d063-486d-a007-317b7ae56da4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.570854] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 963.570854] 
env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 963.572160] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d503887d-dfbf-48dd-b999-74382a618ecf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.578046] env[69328]: DEBUG oslo_concurrency.lockutils [None req-57bfa0df-fcd6-4e15-9bff-8dc79759f989 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "interface-36f6aab5-2774-402b-9db6-9912f2d5d473-3cd046f6-ec54-42ea-acae-2410cbcf3a47" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 963.578347] env[69328]: DEBUG oslo_concurrency.lockutils [None req-57bfa0df-fcd6-4e15-9bff-8dc79759f989 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "interface-36f6aab5-2774-402b-9db6-9912f2d5d473-3cd046f6-ec54-42ea-acae-2410cbcf3a47" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 963.591087] env[69328]: DEBUG oslo_vmware.api [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 963.591087] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]520b278e-d958-5d86-b443-d0ca6eb5bd6b" [ 963.591087] env[69328]: _type = "Task" [ 963.591087] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.597039] env[69328]: DEBUG oslo_vmware.api [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]520b278e-d958-5d86-b443-d0ca6eb5bd6b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.609099] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd76ee2-dece-4d5f-8783-a55e167a0825 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.620980] env[69328]: DEBUG nova.compute.manager [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Start building block device mappings for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 963.627502] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49b4fd33-2f52-486c-9df9-20ab1bb641a0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.668557] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-257389dd-879c-4a9c-8bde-96242e80f3d0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.678475] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b02979c4-e4f4-45f8-88c7-f1bb5d885596 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.693339] env[69328]: DEBUG nova.compute.provider_tree [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 963.705478] env[69328]: DEBUG nova.network.neutron [req-a6463634-420a-4b6d-9f8c-1238ca2eb09b req-b5f7bc60-ba07-4003-aec9-4ddd4ac63c1e service nova] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Updated VIF entry in instance network info cache for port 9fa28c73-cefa-44f5-a043-9e6ce86838c0. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 963.705478] env[69328]: DEBUG nova.network.neutron [req-a6463634-420a-4b6d-9f8c-1238ca2eb09b req-b5f7bc60-ba07-4003-aec9-4ddd4ac63c1e service nova] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Updating instance_info_cache with network_info: [{"id": "9fa28c73-cefa-44f5-a043-9e6ce86838c0", "address": "fa:16:3e:c6:99:bc", "network": {"id": "77f17547-8c62-4a31-9840-8d2cbb1bfbab", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-163692077-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bad0df17bba4bc996fe5cf1faf23fad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9fa28c73-ce", "ovs_interfaceid": "9fa28c73-cefa-44f5-a043-9e6ce86838c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.787426] env[69328]: DEBUG oslo_vmware.api [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273668, 'name': PowerOnVM_Task, 'duration_secs': 0.877416} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.787724] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 963.787913] env[69328]: INFO nova.compute.manager [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Took 10.98 seconds to spawn the instance on the hypervisor. [ 963.788110] env[69328]: DEBUG nova.compute.manager [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 963.788960] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c485016-22de-427b-b972-3acaf6672c51 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.936411] env[69328]: DEBUG nova.network.neutron [-] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.953230] env[69328]: DEBUG oslo_vmware.api [None req-9f77844c-f748-4d64-ad38-fcb4c51d97ec tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273670, 'name': ReconfigVM_Task, 'duration_secs': 0.429185} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.955117] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f77844c-f748-4d64-ad38-fcb4c51d97ec tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Reconfigured VM instance instance-00000046 to attach disk [datastore2] volume-a39fd325-7c9d-4482-b7a4-43b28bf52e5c/volume-a39fd325-7c9d-4482-b7a4-43b28bf52e5c.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 963.959866] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8f691223-2d5c-4704-a6f6-e6636237a5f3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.961918] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-39905e57-5811-419f-b050-0e65da49f94d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.980642] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78ba92f6-d25d-4a62-8737-e67a809a7c1a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.992338] env[69328]: DEBUG oslo_vmware.api [None req-9f77844c-f748-4d64-ad38-fcb4c51d97ec tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 963.992338] env[69328]: value = "task-3273672" [ 963.992338] env[69328]: _type = "Task" [ 963.992338] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.002865] env[69328]: DEBUG oslo_vmware.api [None req-9f77844c-f748-4d64-ad38-fcb4c51d97ec tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273672, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.021287] env[69328]: DEBUG nova.compute.manager [req-a81476df-1650-4d88-b8f3-5c2b026a8edd req-11dc06c2-27a9-40ef-a00e-7d78bf16ee09 service nova] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Detach interface failed, port_id=322eba88-5363-41a8-a44d-50e0a7fdf92e, reason: Instance 9ad2b2e3-460a-403e-bfc7-f46648c93849 could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 964.030763] env[69328]: DEBUG oslo_vmware.api [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273671, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.081717] env[69328]: DEBUG oslo_concurrency.lockutils [None req-57bfa0df-fcd6-4e15-9bff-8dc79759f989 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "36f6aab5-2774-402b-9db6-9912f2d5d473" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 964.081954] env[69328]: DEBUG oslo_concurrency.lockutils [None req-57bfa0df-fcd6-4e15-9bff-8dc79759f989 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquired lock "36f6aab5-2774-402b-9db6-9912f2d5d473" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 964.082877] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca411628-ba60-4285-acf8-1d32a39fae05 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.105336] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c41dc31-ed16-42c4-90b7-a02482a2c07e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.112213] env[69328]: DEBUG oslo_vmware.api [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]520b278e-d958-5d86-b443-d0ca6eb5bd6b, 'name': SearchDatastore_Task, 'duration_secs': 0.013782} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.113543] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77e23277-e1a0-4a27-a190-492fc5214f70 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.141733] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-57bfa0df-fcd6-4e15-9bff-8dc79759f989 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Reconfiguring VM to detach interface {{(pid=69328) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 964.142725] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d74546da-b38f-4b2a-90db-f74221814ae8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.156992] env[69328]: DEBUG oslo_vmware.api [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 964.156992] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52bbb951-8220-0eba-b584-729c7db1124e" [ 964.156992] env[69328]: _type = "Task" [ 964.156992] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.163787] env[69328]: DEBUG oslo_vmware.api [None req-57bfa0df-fcd6-4e15-9bff-8dc79759f989 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 964.163787] env[69328]: value = "task-3273673" [ 964.163787] env[69328]: _type = "Task" [ 964.163787] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.169922] env[69328]: DEBUG oslo_vmware.api [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52bbb951-8220-0eba-b584-729c7db1124e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.176794] env[69328]: DEBUG oslo_vmware.api [None req-57bfa0df-fcd6-4e15-9bff-8dc79759f989 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273673, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.197193] env[69328]: DEBUG nova.scheduler.client.report [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 964.207606] env[69328]: DEBUG oslo_concurrency.lockutils [req-a6463634-420a-4b6d-9f8c-1238ca2eb09b req-b5f7bc60-ba07-4003-aec9-4ddd4ac63c1e service nova] Releasing lock "refresh_cache-a95d01cf-c26b-466c-a5b6-a7e43f0321fa" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 964.318020] env[69328]: INFO nova.compute.manager [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Took 17.84 seconds to build instance. [ 964.440104] env[69328]: INFO nova.compute.manager [-] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Took 1.30 seconds to deallocate network for instance. [ 964.502649] env[69328]: DEBUG oslo_vmware.api [None req-9f77844c-f748-4d64-ad38-fcb4c51d97ec tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273672, 'name': ReconfigVM_Task, 'duration_secs': 0.171366} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.502941] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f77844c-f748-4d64-ad38-fcb4c51d97ec tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653876', 'volume_id': 'a39fd325-7c9d-4482-b7a4-43b28bf52e5c', 'name': 'volume-a39fd325-7c9d-4482-b7a4-43b28bf52e5c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3ba646e8-a5c8-4917-a1c4-32b37affb598', 'attached_at': '', 'detached_at': '', 'volume_id': 'a39fd325-7c9d-4482-b7a4-43b28bf52e5c', 'serial': 'a39fd325-7c9d-4482-b7a4-43b28bf52e5c'} {{(pid=69328) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 964.530517] env[69328]: DEBUG oslo_vmware.api [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273671, 'name': PowerOnVM_Task, 'duration_secs': 0.563942} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.530752] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 964.530950] env[69328]: INFO nova.compute.manager [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Took 9.13 seconds to spawn the instance on the hypervisor. [ 964.531136] env[69328]: DEBUG nova.compute.manager [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 964.531934] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcc0a97f-d2f4-42ad-b47d-a1535f16a371 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.643490] env[69328]: DEBUG nova.compute.manager [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 964.671048] env[69328]: DEBUG oslo_vmware.api [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52bbb951-8220-0eba-b584-729c7db1124e, 'name': SearchDatastore_Task, 'duration_secs': 0.021818} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.673341] env[69328]: DEBUG nova.virt.hardware [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 964.673602] env[69328]: DEBUG nova.virt.hardware [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 964.673757] env[69328]: DEBUG nova.virt.hardware [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 964.674015] env[69328]: DEBUG nova.virt.hardware [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 964.674190] env[69328]: DEBUG nova.virt.hardware [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 964.674339] env[69328]: DEBUG nova.virt.hardware [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 964.674689] env[69328]: DEBUG nova.virt.hardware [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 964.674689] env[69328]: DEBUG nova.virt.hardware [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 964.674847] env[69328]: DEBUG nova.virt.hardware [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 964.675016] env[69328]: DEBUG nova.virt.hardware [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 964.675200] env[69328]: DEBUG nova.virt.hardware [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 964.675846] env[69328]: DEBUG oslo_concurrency.lockutils [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 964.676111] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] a95d01cf-c26b-466c-a5b6-a7e43f0321fa/a95d01cf-c26b-466c-a5b6-a7e43f0321fa.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 964.676886] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1ccccf7-871d-469a-b33e-5646259b4dc1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.682356] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-40ddc50a-b416-41cf-8230-5ca50ca76e4e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.684574] env[69328]: DEBUG oslo_vmware.api [None req-57bfa0df-fcd6-4e15-9bff-8dc79759f989 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273673, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.691753] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fc4ba23-26c0-4d3d-9fdf-3b1ac9bb0583 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.695597] env[69328]: DEBUG oslo_vmware.api [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 964.695597] env[69328]: value = "task-3273674" [ 964.695597] env[69328]: _type = "Task" [ 964.695597] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.708555] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.101s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 964.711520] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.671s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 964.713012] env[69328]: INFO nova.compute.claims [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 964.718278] env[69328]: DEBUG oslo_vmware.api [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273674, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.731035] env[69328]: INFO nova.scheduler.client.report [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Deleted allocations for instance 18022645-9a2a-489e-b0b1-486165f46f14 [ 964.817877] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8738f2ed-fa3f-4189-9ed1-c3b2e7a553f2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "96f604a9-e42c-4aa8-b5b5-edcb34901d94" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.361s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 964.946136] env[69328]: DEBUG oslo_concurrency.lockutils [None req-be4368cc-235b-4293-8984-4bd2a2f178dd tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 965.039170] env[69328]: DEBUG nova.compute.manager [req-775e6f09-b244-4fca-9b41-038730102568 req-32d3b6cb-27b5-45d3-9e43-8982922a361c service nova] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Received event network-vif-plugged-3d148298-6666-4189-8b2b-3048f28a8bb7 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 965.039170] env[69328]: DEBUG oslo_concurrency.lockutils [req-775e6f09-b244-4fca-9b41-038730102568 req-32d3b6cb-27b5-45d3-9e43-8982922a361c service nova] Acquiring lock "7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 965.039332] env[69328]: DEBUG oslo_concurrency.lockutils [req-775e6f09-b244-4fca-9b41-038730102568 req-32d3b6cb-27b5-45d3-9e43-8982922a361c service nova] Lock "7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 965.040437] env[69328]: DEBUG oslo_concurrency.lockutils [req-775e6f09-b244-4fca-9b41-038730102568 req-32d3b6cb-27b5-45d3-9e43-8982922a361c service nova] Lock "7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 965.040437] env[69328]: DEBUG nova.compute.manager [req-775e6f09-b244-4fca-9b41-038730102568 req-32d3b6cb-27b5-45d3-9e43-8982922a361c service nova] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] No waiting events found dispatching network-vif-plugged-3d148298-6666-4189-8b2b-3048f28a8bb7 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 965.040437] env[69328]: WARNING nova.compute.manager [req-775e6f09-b244-4fca-9b41-038730102568 req-32d3b6cb-27b5-45d3-9e43-8982922a361c service nova] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Received unexpected event network-vif-plugged-3d148298-6666-4189-8b2b-3048f28a8bb7 for instance with vm_state building and task_state spawning. 
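The req-775e6f09 entries above show the compute manager receiving a network-vif-plugged event while the instance is still in vm_state building: no waiter had been registered for that event yet, so it is logged as unexpected and dropped. The sketch below is a minimal, hypothetical illustration of that wait-by-event-name pattern; it is not Nova's implementation, and every name in it (ExternalEventWaiter, prepare, dispatch) is invented for illustration only.

```python
# Minimal sketch of an external-event waiter keyed by event name, loosely
# modelled on the "network-vif-plugged-<port>" handling seen in the log.
# Hypothetical illustration only -- not Nova's actual code.
import threading


class ExternalEventWaiter:
    """Track expected external events such as 'network-vif-plugged-<port>'."""

    def __init__(self):
        self._lock = threading.Lock()
        self._events = {}  # event name -> threading.Event

    def prepare(self, name):
        # Register interest before starting the operation that will
        # eventually trigger the event (e.g. plugging a VIF during spawn).
        with self._lock:
            return self._events.setdefault(name, threading.Event())

    def dispatch(self, name):
        # Called when the notification arrives from the network service.
        with self._lock:
            ev = self._events.pop(name, None)
        if ev is None:
            # Mirrors the WARNING in the log: nobody was waiting for this
            # event, so it is reported as unexpected and otherwise ignored.
            print(f"Received unexpected event {name}")
        else:
            ev.set()


if __name__ == "__main__":
    waiter = ExternalEventWaiter()
    port_id = "3d148298-6666-4189-8b2b-3048f28a8bb7"

    # Spawning side: register interest, then wait with a timeout.
    pending = waiter.prepare(f"network-vif-plugged-{port_id}")

    # Event side (normally another thread or service): deliver the event.
    waiter.dispatch(f"network-vif-plugged-{port_id}")

    assert pending.wait(timeout=1.0), "timed out waiting for VIF plug event"
    print("VIF plug event received; spawn can continue")
```

If dispatch arrives before prepare, as in the logged case, the event is treated as unexpected rather than queued; the later port-binding refresh (the network-changed handling further down) is what brings the cache back in sync.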
[ 965.050641] env[69328]: INFO nova.compute.manager [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Took 17.19 seconds to build instance. [ 965.139305] env[69328]: DEBUG nova.network.neutron [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Successfully updated port: 3d148298-6666-4189-8b2b-3048f28a8bb7 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 965.178452] env[69328]: DEBUG oslo_vmware.api [None req-57bfa0df-fcd6-4e15-9bff-8dc79759f989 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273673, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.206021] env[69328]: DEBUG oslo_vmware.api [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273674, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.239222] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8b9e830e-0a60-44c3-bc92-024100b179a6 tempest-ServersTestBootFromVolume-1521618215 tempest-ServersTestBootFromVolume-1521618215-project-member] Lock "18022645-9a2a-489e-b0b1-486165f46f14" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.257s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 965.553418] env[69328]: DEBUG nova.objects.instance [None req-9f77844c-f748-4d64-ad38-fcb4c51d97ec tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lazy-loading 'flavor' on Instance uuid 3ba646e8-a5c8-4917-a1c4-32b37affb598 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 965.555103] env[69328]: DEBUG oslo_concurrency.lockutils [None req-903871e5-251a-42d7-bf5b-232ef6fc9309 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "65e38a02-880b-46e2-8866-645a9fc17c7a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.707s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 965.644234] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Acquiring lock "refresh_cache-7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.644417] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Acquired lock "refresh_cache-7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 965.644865] env[69328]: DEBUG 
nova.network.neutron [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 965.678563] env[69328]: DEBUG oslo_vmware.api [None req-57bfa0df-fcd6-4e15-9bff-8dc79759f989 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273673, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.708181] env[69328]: DEBUG oslo_vmware.api [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273674, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.571691} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.708479] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] a95d01cf-c26b-466c-a5b6-a7e43f0321fa/a95d01cf-c26b-466c-a5b6-a7e43f0321fa.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 965.708696] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 965.708967] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0ca4aa33-f61f-43dd-a682-78668e9af72c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.719351] env[69328]: DEBUG oslo_vmware.api [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 965.719351] env[69328]: value = "task-3273675" [ 965.719351] env[69328]: _type = "Task" [ 965.719351] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.734989] env[69328]: DEBUG oslo_vmware.api [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273675, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.060312] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9f77844c-f748-4d64-ad38-fcb4c51d97ec tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "3ba646e8-a5c8-4917-a1c4-32b37affb598" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.812s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 966.062729] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-401531d1-5b68-454f-825d-a8520b076fb5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.074795] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05879cf7-15b7-41c7-89a5-dcc352561b49 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.110570] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6983728a-7b89-4781-a921-83d3e1fb5297 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.121015] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2995fce-4052-4699-88d9-a6c935f93a9e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.137567] env[69328]: DEBUG nova.compute.provider_tree [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 966.176304] env[69328]: DEBUG oslo_vmware.api [None req-57bfa0df-fcd6-4e15-9bff-8dc79759f989 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273673, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.199906] env[69328]: DEBUG nova.network.neutron [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 966.229971] env[69328]: DEBUG oslo_vmware.api [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273675, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070229} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.230263] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 966.231099] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72c036d9-4715-4ece-86f1-6fd7b3c98cff {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.264100] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Reconfiguring VM instance instance-00000053 to attach disk [datastore2] a95d01cf-c26b-466c-a5b6-a7e43f0321fa/a95d01cf-c26b-466c-a5b6-a7e43f0321fa.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 966.264816] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dde79a30-9d02-4a66-aab8-3e6d893cf5a3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.295026] env[69328]: DEBUG oslo_vmware.api [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 966.295026] env[69328]: value = "task-3273676" [ 966.295026] env[69328]: _type = "Task" [ 966.295026] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.303943] env[69328]: DEBUG oslo_vmware.api [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273676, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.475890] env[69328]: DEBUG nova.network.neutron [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Updating instance_info_cache with network_info: [{"id": "3d148298-6666-4189-8b2b-3048f28a8bb7", "address": "fa:16:3e:b7:95:e5", "network": {"id": "35366737-eba8-4579-a0a9-2b5621ab8c8d", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-726118299-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e9132cd9fbb84a24a9007c37e02261dd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "424fd631-4456-4ce2-8924-a2ed81d60bd6", "external-id": "nsx-vlan-transportzone-19", "segmentation_id": 19, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d148298-66", "ovs_interfaceid": "3d148298-6666-4189-8b2b-3048f28a8bb7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 966.642117] env[69328]: DEBUG nova.scheduler.client.report [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 966.680057] env[69328]: DEBUG oslo_vmware.api [None req-57bfa0df-fcd6-4e15-9bff-8dc79759f989 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273673, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.688225] env[69328]: DEBUG nova.compute.manager [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Stashing vm_state: active {{(pid=69328) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 966.804237] env[69328]: DEBUG oslo_vmware.api [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273676, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.978737] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Releasing lock "refresh_cache-7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 966.978737] env[69328]: DEBUG nova.compute.manager [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Instance network_info: |[{"id": "3d148298-6666-4189-8b2b-3048f28a8bb7", "address": "fa:16:3e:b7:95:e5", "network": {"id": "35366737-eba8-4579-a0a9-2b5621ab8c8d", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-726118299-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e9132cd9fbb84a24a9007c37e02261dd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "424fd631-4456-4ce2-8924-a2ed81d60bd6", "external-id": "nsx-vlan-transportzone-19", "segmentation_id": 19, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d148298-66", "ovs_interfaceid": "3d148298-6666-4189-8b2b-3048f28a8bb7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 966.979295] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b7:95:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '424fd631-4456-4ce2-8924-a2ed81d60bd6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3d148298-6666-4189-8b2b-3048f28a8bb7', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 966.987334] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Creating folder: Project (e9132cd9fbb84a24a9007c37e02261dd). Parent ref: group-v653649. 
{{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 966.987642] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4542860e-da06-4e2d-9aa3-4f55f77f6a75 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.002667] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Created folder: Project (e9132cd9fbb84a24a9007c37e02261dd) in parent group-v653649. [ 967.002667] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Creating folder: Instances. Parent ref: group-v653878. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 967.002667] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-15b145d0-e2f1-48a3-ba6d-703a0d5181ca {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.014094] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Created folder: Instances in parent group-v653878. [ 967.014683] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 967.014922] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 967.015259] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-edac3281-08f8-48c8-9c7c-e9d1c842764a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.035223] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 967.035223] env[69328]: value = "task-3273679" [ 967.035223] env[69328]: _type = "Task" [ 967.035223] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.043709] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273679, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.069076] env[69328]: DEBUG nova.compute.manager [req-2c504961-8277-413e-8d9f-5cbabe3da542 req-25eb7511-2250-43d2-bb60-598f2dfae712 service nova] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Received event network-changed-3d148298-6666-4189-8b2b-3048f28a8bb7 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 967.069076] env[69328]: DEBUG nova.compute.manager [req-2c504961-8277-413e-8d9f-5cbabe3da542 req-25eb7511-2250-43d2-bb60-598f2dfae712 service nova] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Refreshing instance network info cache due to event network-changed-3d148298-6666-4189-8b2b-3048f28a8bb7. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 967.069425] env[69328]: DEBUG oslo_concurrency.lockutils [req-2c504961-8277-413e-8d9f-5cbabe3da542 req-25eb7511-2250-43d2-bb60-598f2dfae712 service nova] Acquiring lock "refresh_cache-7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 967.069425] env[69328]: DEBUG oslo_concurrency.lockutils [req-2c504961-8277-413e-8d9f-5cbabe3da542 req-25eb7511-2250-43d2-bb60-598f2dfae712 service nova] Acquired lock "refresh_cache-7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 967.069621] env[69328]: DEBUG nova.network.neutron [req-2c504961-8277-413e-8d9f-5cbabe3da542 req-25eb7511-2250-43d2-bb60-598f2dfae712 service nova] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Refreshing network info cache for port 3d148298-6666-4189-8b2b-3048f28a8bb7 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 967.107148] env[69328]: INFO nova.compute.manager [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Rebuilding instance [ 967.145417] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.434s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 967.146076] env[69328]: DEBUG nova.compute.manager [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 967.148859] env[69328]: DEBUG oslo_concurrency.lockutils [None req-773aa48b-a3af-4641-9c01-66d8aed29949 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.802s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 967.149921] env[69328]: DEBUG nova.objects.instance [None req-773aa48b-a3af-4641-9c01-66d8aed29949 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Lazy-loading 'resources' on Instance uuid 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 967.179547] env[69328]: DEBUG nova.compute.manager [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 967.179547] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e169ec5-2bdb-479b-a735-d026681d84c9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.190270] env[69328]: DEBUG oslo_vmware.api [None req-57bfa0df-fcd6-4e15-9bff-8dc79759f989 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273673, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.206802] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 967.303655] env[69328]: DEBUG oslo_vmware.api [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273676, 'name': ReconfigVM_Task, 'duration_secs': 0.525579} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.303940] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Reconfigured VM instance instance-00000053 to attach disk [datastore2] a95d01cf-c26b-466c-a5b6-a7e43f0321fa/a95d01cf-c26b-466c-a5b6-a7e43f0321fa.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 967.304920] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-746afacc-931f-4b2a-b33d-f1cec622cb26 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.312507] env[69328]: DEBUG oslo_vmware.api [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 967.312507] env[69328]: value = "task-3273680" [ 967.312507] env[69328]: _type = "Task" [ 967.312507] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.321050] env[69328]: DEBUG oslo_vmware.api [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273680, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.552626] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273679, 'name': CreateVM_Task} progress is 25%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.653566] env[69328]: DEBUG nova.compute.utils [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 967.658372] env[69328]: DEBUG nova.compute.manager [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 967.658503] env[69328]: DEBUG nova.network.neutron [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 967.684147] env[69328]: DEBUG oslo_vmware.api [None req-57bfa0df-fcd6-4e15-9bff-8dc79759f989 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273673, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.753598] env[69328]: DEBUG nova.policy [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8d44d76c6830455f96cbd28736fb82f4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '95c9dcf3546a4d4ab45e934d09241d14', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 967.825404] env[69328]: DEBUG oslo_vmware.api [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273680, 'name': Rename_Task, 'duration_secs': 0.257617} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.825755] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 967.826034] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-932cfc3f-10c3-4a25-8a22-db2fe6c0e52d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.834469] env[69328]: DEBUG oslo_vmware.api [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 967.834469] env[69328]: value = "task-3273681" [ 967.834469] env[69328]: _type = "Task" [ 967.834469] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.842604] env[69328]: DEBUG oslo_vmware.api [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273681, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.901552] env[69328]: DEBUG nova.network.neutron [req-2c504961-8277-413e-8d9f-5cbabe3da542 req-25eb7511-2250-43d2-bb60-598f2dfae712 service nova] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Updated VIF entry in instance network info cache for port 3d148298-6666-4189-8b2b-3048f28a8bb7. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 967.901950] env[69328]: DEBUG nova.network.neutron [req-2c504961-8277-413e-8d9f-5cbabe3da542 req-25eb7511-2250-43d2-bb60-598f2dfae712 service nova] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Updating instance_info_cache with network_info: [{"id": "3d148298-6666-4189-8b2b-3048f28a8bb7", "address": "fa:16:3e:b7:95:e5", "network": {"id": "35366737-eba8-4579-a0a9-2b5621ab8c8d", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-726118299-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e9132cd9fbb84a24a9007c37e02261dd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "424fd631-4456-4ce2-8924-a2ed81d60bd6", "external-id": "nsx-vlan-transportzone-19", "segmentation_id": 19, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d148298-66", "ovs_interfaceid": "3d148298-6666-4189-8b2b-3048f28a8bb7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 968.048223] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273679, 'name': CreateVM_Task, 'duration_secs': 0.683866} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.050961] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 968.051868] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.052036] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 968.052344] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 968.052595] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e4a83a2-0ce2-4bb2-924d-a197bb4e1c92 {{(pid=69328) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.057898] env[69328]: DEBUG oslo_vmware.api [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Waiting for the task: (returnval){ [ 968.057898] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d1410b-26c9-3494-d9d6-b22d271c80bc" [ 968.057898] env[69328]: _type = "Task" [ 968.057898] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.063524] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecaff247-0546-4dad-9d47-080e7c2933e5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.068695] env[69328]: DEBUG oslo_vmware.api [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d1410b-26c9-3494-d9d6-b22d271c80bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.073759] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6efb3c77-9895-4627-a945-cd7e49ad9a24 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.107204] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86ae0be6-5cce-4754-af2a-22aab09a1761 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.115938] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e54a4cd-d8f1-4459-a9d2-c9e4db1528c7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.131568] env[69328]: DEBUG nova.compute.provider_tree [None req-773aa48b-a3af-4641-9c01-66d8aed29949 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 968.159146] env[69328]: DEBUG nova.compute.manager [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 968.184691] env[69328]: DEBUG oslo_vmware.api [None req-57bfa0df-fcd6-4e15-9bff-8dc79759f989 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273673, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.206252] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 968.206514] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-55517e9e-e642-44cf-accc-9700d5e3cf0e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.215347] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 968.215347] env[69328]: value = "task-3273682" [ 968.215347] env[69328]: _type = "Task" [ 968.215347] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.228387] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273682, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.231701] env[69328]: DEBUG nova.network.neutron [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Successfully created port: f7b2aa6f-1dee-4050-84ad-2ea0e567211b {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 968.347705] env[69328]: DEBUG oslo_vmware.api [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273681, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.404726] env[69328]: DEBUG oslo_concurrency.lockutils [req-2c504961-8277-413e-8d9f-5cbabe3da542 req-25eb7511-2250-43d2-bb60-598f2dfae712 service nova] Releasing lock "refresh_cache-7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 968.573023] env[69328]: DEBUG oslo_vmware.api [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d1410b-26c9-3494-d9d6-b22d271c80bc, 'name': SearchDatastore_Task, 'duration_secs': 0.024357} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.573359] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 968.573582] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 968.573876] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.574055] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 968.574244] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 968.574509] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-985ce4f4-a836-41e7-b0c6-b12800ac60d2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.588105] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 968.588407] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 968.589220] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-923a5169-da67-4905-951b-0f86c30141b7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.596381] env[69328]: DEBUG oslo_vmware.api [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Waiting for the task: (returnval){ [ 968.596381] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52828c8b-0f78-82b2-4922-0f8fe1fe97e5" [ 968.596381] env[69328]: _type = "Task" [ 968.596381] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.606726] env[69328]: DEBUG oslo_vmware.api [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52828c8b-0f78-82b2-4922-0f8fe1fe97e5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.635218] env[69328]: DEBUG nova.scheduler.client.report [None req-773aa48b-a3af-4641-9c01-66d8aed29949 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 968.685819] env[69328]: DEBUG oslo_vmware.api [None req-57bfa0df-fcd6-4e15-9bff-8dc79759f989 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273673, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.735807] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273682, 'name': PowerOffVM_Task, 'duration_secs': 0.24301} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.736359] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 968.812114] env[69328]: INFO nova.compute.manager [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Detaching volume a39fd325-7c9d-4482-b7a4-43b28bf52e5c [ 968.847499] env[69328]: DEBUG oslo_vmware.api [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273681, 'name': PowerOnVM_Task, 'duration_secs': 0.914909} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.847855] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 968.848093] env[69328]: INFO nova.compute.manager [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Took 8.85 seconds to spawn the instance on the hypervisor. [ 968.848273] env[69328]: DEBUG nova.compute.manager [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 968.849093] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60dc3775-7e80-457a-b5a3-e2e8b9bd3a08 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.852338] env[69328]: INFO nova.virt.block_device [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Attempting to driver detach volume a39fd325-7c9d-4482-b7a4-43b28bf52e5c from mountpoint /dev/sdb [ 968.852533] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Volume detach. 
Driver type: vmdk {{(pid=69328) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 968.852712] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653876', 'volume_id': 'a39fd325-7c9d-4482-b7a4-43b28bf52e5c', 'name': 'volume-a39fd325-7c9d-4482-b7a4-43b28bf52e5c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3ba646e8-a5c8-4917-a1c4-32b37affb598', 'attached_at': '', 'detached_at': '', 'volume_id': 'a39fd325-7c9d-4482-b7a4-43b28bf52e5c', 'serial': 'a39fd325-7c9d-4482-b7a4-43b28bf52e5c'} {{(pid=69328) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 968.853445] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc9f22a6-6f2c-4a0b-998c-34f6ba68e3fb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.880292] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db05e5df-efc1-4acc-8f1b-60268e418feb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.888742] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3c2ae56-193a-43ff-9380-1da978f69c18 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.913825] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6ae0ce1-c3e9-4e76-bf74-770e4bb099c8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.929837] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] The volume has not been displaced from its original location: [datastore2] volume-a39fd325-7c9d-4482-b7a4-43b28bf52e5c/volume-a39fd325-7c9d-4482-b7a4-43b28bf52e5c.vmdk. No consolidation needed. 
{{(pid=69328) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 968.935158] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Reconfiguring VM instance instance-00000046 to detach disk 2001 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 968.935678] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fd103993-a847-4e5e-a708-5a983acaf5da {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.957067] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 968.957067] env[69328]: value = "task-3273683" [ 968.957067] env[69328]: _type = "Task" [ 968.957067] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.964784] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273683, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.107618] env[69328]: DEBUG oslo_vmware.api [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52828c8b-0f78-82b2-4922-0f8fe1fe97e5, 'name': SearchDatastore_Task, 'duration_secs': 0.010469} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.108421] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07f4ae36-222e-40aa-9730-119bc93124fd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.114015] env[69328]: DEBUG oslo_vmware.api [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Waiting for the task: (returnval){ [ 969.114015] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]529f703b-ed3a-a3d0-ba17-936def3eb8f6" [ 969.114015] env[69328]: _type = "Task" [ 969.114015] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.121708] env[69328]: DEBUG oslo_vmware.api [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]529f703b-ed3a-a3d0-ba17-936def3eb8f6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.143274] env[69328]: DEBUG oslo_concurrency.lockutils [None req-773aa48b-a3af-4641-9c01-66d8aed29949 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.994s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 969.146873] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.701s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 969.151534] env[69328]: INFO nova.compute.claims [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 969.170178] env[69328]: DEBUG nova.compute.manager [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 969.173496] env[69328]: INFO nova.scheduler.client.report [None req-773aa48b-a3af-4641-9c01-66d8aed29949 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Deleted allocations for instance 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4 [ 969.186882] env[69328]: DEBUG oslo_vmware.api [None req-57bfa0df-fcd6-4e15-9bff-8dc79759f989 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273673, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.206134] env[69328]: DEBUG nova.virt.hardware [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 969.206571] env[69328]: DEBUG nova.virt.hardware [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 969.206747] env[69328]: DEBUG nova.virt.hardware [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 969.206928] env[69328]: DEBUG nova.virt.hardware [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 969.207581] env[69328]: DEBUG nova.virt.hardware [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 969.207790] env[69328]: DEBUG nova.virt.hardware [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 969.208434] env[69328]: DEBUG nova.virt.hardware [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 969.208602] env[69328]: DEBUG nova.virt.hardware [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 969.208799] env[69328]: DEBUG nova.virt.hardware [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 
tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 969.208961] env[69328]: DEBUG nova.virt.hardware [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 969.209144] env[69328]: DEBUG nova.virt.hardware [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 969.210065] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9cb1c0f-769a-41e7-8cbd-b5b6e93c4738 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.222249] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aae43857-9814-4eb5-ba72-a56f42427c9f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.394596] env[69328]: INFO nova.compute.manager [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Took 20.87 seconds to build instance. [ 969.471408] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273683, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.628733] env[69328]: DEBUG oslo_vmware.api [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]529f703b-ed3a-a3d0-ba17-936def3eb8f6, 'name': SearchDatastore_Task, 'duration_secs': 0.038746} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.629197] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 969.629565] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea/7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 969.629926] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-35207ab4-fb79-496c-988c-c19e02f4df9b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.637950] env[69328]: DEBUG oslo_vmware.api [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Waiting for the task: (returnval){ [ 969.637950] env[69328]: value = "task-3273684" [ 969.637950] env[69328]: _type = "Task" [ 969.637950] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.649475] env[69328]: DEBUG oslo_vmware.api [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Task: {'id': task-3273684, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.690974] env[69328]: DEBUG oslo_vmware.api [None req-57bfa0df-fcd6-4e15-9bff-8dc79759f989 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273673, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.691461] env[69328]: DEBUG oslo_concurrency.lockutils [None req-773aa48b-a3af-4641-9c01-66d8aed29949 tempest-ServersTestFqdnHostnames-1173412037 tempest-ServersTestFqdnHostnames-1173412037-project-member] Lock "8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.550s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 969.903026] env[69328]: DEBUG oslo_concurrency.lockutils [None req-96ce3e15-7e11-4d2a-aebf-191bd8abbca7 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "a95d01cf-c26b-466c-a5b6-a7e43f0321fa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.386s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 969.970391] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273683, 'name': ReconfigVM_Task, 'duration_secs': 0.514984} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.970771] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Reconfigured VM instance instance-00000046 to detach disk 2001 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 969.977293] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1b104639-8343-4d76-ba12-29ea30af4fbd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.997124] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 969.997124] env[69328]: value = "task-3273685" [ 969.997124] env[69328]: _type = "Task" [ 969.997124] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.009639] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273685, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.151137] env[69328]: DEBUG oslo_vmware.api [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Task: {'id': task-3273684, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.193777] env[69328]: DEBUG oslo_vmware.api [None req-57bfa0df-fcd6-4e15-9bff-8dc79759f989 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273673, 'name': ReconfigVM_Task, 'duration_secs': 5.773306} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.195309] env[69328]: DEBUG oslo_concurrency.lockutils [None req-57bfa0df-fcd6-4e15-9bff-8dc79759f989 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Releasing lock "36f6aab5-2774-402b-9db6-9912f2d5d473" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 970.199215] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-57bfa0df-fcd6-4e15-9bff-8dc79759f989 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Reconfigured VM to detach interface {{(pid=69328) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 970.295205] env[69328]: DEBUG nova.network.neutron [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Successfully updated port: f7b2aa6f-1dee-4050-84ad-2ea0e567211b {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 970.339404] env[69328]: DEBUG nova.compute.manager [req-250501f1-31f3-49b8-9b86-822bec8566a9 req-a3255535-11e6-43ae-8c77-32c11e2f4d0a service nova] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Received event network-vif-plugged-f7b2aa6f-1dee-4050-84ad-2ea0e567211b {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 970.339651] env[69328]: DEBUG oslo_concurrency.lockutils [req-250501f1-31f3-49b8-9b86-822bec8566a9 req-a3255535-11e6-43ae-8c77-32c11e2f4d0a service nova] Acquiring lock "772ab9b3-23ac-46c6-acb1-af0b2726fd90-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 970.340883] env[69328]: DEBUG oslo_concurrency.lockutils [req-250501f1-31f3-49b8-9b86-822bec8566a9 req-a3255535-11e6-43ae-8c77-32c11e2f4d0a service nova] Lock "772ab9b3-23ac-46c6-acb1-af0b2726fd90-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 970.340883] env[69328]: DEBUG oslo_concurrency.lockutils [req-250501f1-31f3-49b8-9b86-822bec8566a9 req-a3255535-11e6-43ae-8c77-32c11e2f4d0a service nova] Lock "772ab9b3-23ac-46c6-acb1-af0b2726fd90-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 970.341685] env[69328]: DEBUG nova.compute.manager [req-250501f1-31f3-49b8-9b86-822bec8566a9 req-a3255535-11e6-43ae-8c77-32c11e2f4d0a service nova] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] No waiting events found dispatching network-vif-plugged-f7b2aa6f-1dee-4050-84ad-2ea0e567211b {{(pid=69328) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 970.341909] env[69328]: WARNING nova.compute.manager [req-250501f1-31f3-49b8-9b86-822bec8566a9 req-a3255535-11e6-43ae-8c77-32c11e2f4d0a service nova] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Received unexpected event network-vif-plugged-f7b2aa6f-1dee-4050-84ad-2ea0e567211b for instance with vm_state building and task_state spawning. [ 970.512574] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273685, 'name': ReconfigVM_Task, 'duration_secs': 0.244898} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.512574] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653876', 'volume_id': 'a39fd325-7c9d-4482-b7a4-43b28bf52e5c', 'name': 'volume-a39fd325-7c9d-4482-b7a4-43b28bf52e5c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3ba646e8-a5c8-4917-a1c4-32b37affb598', 'attached_at': '', 'detached_at': '', 'volume_id': 'a39fd325-7c9d-4482-b7a4-43b28bf52e5c', 'serial': 'a39fd325-7c9d-4482-b7a4-43b28bf52e5c'} {{(pid=69328) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 970.570254] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ebaaea9-f1ae-41dc-af07-33bb5b934251 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.579376] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bc6727a-5d56-407d-8dc4-43be869ca9c4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.618146] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a5ae225-f8b5-441d-b960-3fddb3391437 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.626226] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f436718-38c0-4e9f-8b3b-6cd39cff564f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.640983] env[69328]: DEBUG nova.compute.provider_tree [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 970.663768] env[69328]: DEBUG oslo_vmware.api [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Task: {'id': task-3273684, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.587234} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.664042] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea/7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 970.664263] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 970.664535] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9669bcac-2b6b-410b-9017-2939bc348d58 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.673241] env[69328]: DEBUG oslo_vmware.api [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Waiting for the task: (returnval){ [ 970.673241] env[69328]: value = "task-3273686" [ 970.673241] env[69328]: _type = "Task" [ 970.673241] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.683674] env[69328]: DEBUG oslo_vmware.api [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Task: {'id': task-3273686, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.764815] env[69328]: DEBUG nova.compute.manager [req-b6ff71f0-68d7-479a-8816-3516e92c9972 req-a1998e3b-187d-4e61-810b-a070329664b4 service nova] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Received event network-vif-deleted-3cd046f6-ec54-42ea-acae-2410cbcf3a47 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 970.765337] env[69328]: INFO nova.compute.manager [req-b6ff71f0-68d7-479a-8816-3516e92c9972 req-a1998e3b-187d-4e61-810b-a070329664b4 service nova] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Neutron deleted interface 3cd046f6-ec54-42ea-acae-2410cbcf3a47; detaching it from the instance and deleting it from the info cache [ 970.765732] env[69328]: DEBUG nova.network.neutron [req-b6ff71f0-68d7-479a-8816-3516e92c9972 req-a1998e3b-187d-4e61-810b-a070329664b4 service nova] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Updating instance_info_cache with network_info: [{"id": "766304d2-5559-4007-9fa4-a01027d56e49", "address": "fa:16:3e:7b:c7:c8", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap766304d2-55", "ovs_interfaceid": "766304d2-5559-4007-9fa4-a01027d56e49", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.797189] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Acquiring lock "refresh_cache-772ab9b3-23ac-46c6-acb1-af0b2726fd90" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.797397] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Acquired lock "refresh_cache-772ab9b3-23ac-46c6-acb1-af0b2726fd90" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 970.797578] env[69328]: DEBUG nova.network.neutron [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 971.151167] env[69328]: DEBUG nova.scheduler.client.report 
[None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 971.187535] env[69328]: DEBUG oslo_vmware.api [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Task: {'id': task-3273686, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07166} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.188948] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 971.190023] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d55fb1e9-09d7-4621-ba22-3dfa70cb5485 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.216853] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea/7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 971.217507] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8bd9291f-0ae7-430e-80f0-a214fc6bff82 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.241138] env[69328]: DEBUG oslo_vmware.api [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Waiting for the task: (returnval){ [ 971.241138] env[69328]: value = "task-3273687" [ 971.241138] env[69328]: _type = "Task" [ 971.241138] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.250917] env[69328]: DEBUG oslo_vmware.api [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Task: {'id': task-3273687, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.270449] env[69328]: DEBUG oslo_concurrency.lockutils [req-b6ff71f0-68d7-479a-8816-3516e92c9972 req-a1998e3b-187d-4e61-810b-a070329664b4 service nova] Acquiring lock "36f6aab5-2774-402b-9db6-9912f2d5d473" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.270590] env[69328]: DEBUG oslo_concurrency.lockutils [req-b6ff71f0-68d7-479a-8816-3516e92c9972 req-a1998e3b-187d-4e61-810b-a070329664b4 service nova] Acquired lock "36f6aab5-2774-402b-9db6-9912f2d5d473" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 971.271550] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f94c78c1-728f-47bf-bdc2-6bfb530bb1fb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.298862] env[69328]: DEBUG oslo_concurrency.lockutils [req-b6ff71f0-68d7-479a-8816-3516e92c9972 req-a1998e3b-187d-4e61-810b-a070329664b4 service nova] Releasing lock "36f6aab5-2774-402b-9db6-9912f2d5d473" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 971.298862] env[69328]: WARNING nova.compute.manager [req-b6ff71f0-68d7-479a-8816-3516e92c9972 req-a1998e3b-187d-4e61-810b-a070329664b4 service nova] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Detach interface failed, port_id=3cd046f6-ec54-42ea-acae-2410cbcf3a47, reason: No device with interface-id 3cd046f6-ec54-42ea-acae-2410cbcf3a47 exists on VM: nova.exception.NotFound: No device with interface-id 3cd046f6-ec54-42ea-acae-2410cbcf3a47 exists on VM [ 971.341136] env[69328]: DEBUG nova.network.neutron [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 971.587289] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 971.587603] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-94dbde4a-ce1a-4487-891c-8dc9f961a00d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.598037] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 971.598037] env[69328]: value = "task-3273688" [ 971.598037] env[69328]: _type = "Task" [ 971.598037] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.618135] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] VM already powered off {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 971.618487] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Volume detach. Driver type: vmdk {{(pid=69328) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 971.618842] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653876', 'volume_id': 'a39fd325-7c9d-4482-b7a4-43b28bf52e5c', 'name': 'volume-a39fd325-7c9d-4482-b7a4-43b28bf52e5c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3ba646e8-a5c8-4917-a1c4-32b37affb598', 'attached_at': '', 'detached_at': '', 'volume_id': 'a39fd325-7c9d-4482-b7a4-43b28bf52e5c', 'serial': 'a39fd325-7c9d-4482-b7a4-43b28bf52e5c'} {{(pid=69328) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 971.620147] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5429d5e5-3bf7-4753-861d-23d31c7db2af {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.645233] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7023ef5d-a389-4300-9046-d29704871d88 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.655407] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.509s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 971.656128] env[69328]: DEBUG nova.compute.manager [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 971.659319] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.498s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 971.661092] env[69328]: INFO nova.compute.claims [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 971.666697] env[69328]: WARNING nova.virt.vmwareapi.driver [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 971.667085] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 971.668180] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2833ef63-435c-48d8-91e9-78720ac1c35e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.673131] env[69328]: DEBUG oslo_concurrency.lockutils [None req-57bfa0df-fcd6-4e15-9bff-8dc79759f989 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "refresh_cache-36f6aab5-2774-402b-9db6-9912f2d5d473" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.673305] env[69328]: DEBUG oslo_concurrency.lockutils [None req-57bfa0df-fcd6-4e15-9bff-8dc79759f989 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquired lock "refresh_cache-36f6aab5-2774-402b-9db6-9912f2d5d473" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 971.673534] env[69328]: DEBUG nova.network.neutron [None req-57bfa0df-fcd6-4e15-9bff-8dc79759f989 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 971.683166] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 971.683166] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-26774261-37a3-413e-be0a-77f96e1e43eb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.753632] env[69328]: DEBUG 
oslo_vmware.api [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Task: {'id': task-3273687, 'name': ReconfigVM_Task, 'duration_secs': 0.506526} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.754023] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Reconfigured VM instance instance-00000054 to attach disk [datastore1] 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea/7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 971.754954] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b0a977fc-0ea0-4fb7-8243-89ffe64a539f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.763354] env[69328]: DEBUG oslo_vmware.api [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Waiting for the task: (returnval){ [ 971.763354] env[69328]: value = "task-3273690" [ 971.763354] env[69328]: _type = "Task" [ 971.763354] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.768299] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 971.769311] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 971.769311] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Deleting the datastore file [datastore2] 3ba646e8-a5c8-4917-a1c4-32b37affb598 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 971.769856] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5e59b2e3-c5e8-4a26-90fa-dced571cc770 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.776145] env[69328]: DEBUG oslo_vmware.api [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Task: {'id': task-3273690, 'name': Rename_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.780023] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 971.780023] env[69328]: value = "task-3273691" [ 971.780023] env[69328]: _type = "Task" [ 971.780023] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.789346] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273691, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.868839] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquiring lock "9f6f8e97-cb21-4984-af08-a63ea4578eef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 971.869171] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "9f6f8e97-cb21-4984-af08-a63ea4578eef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 971.927479] env[69328]: DEBUG nova.network.neutron [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Updating instance_info_cache with network_info: [{"id": "f7b2aa6f-1dee-4050-84ad-2ea0e567211b", "address": "fa:16:3e:df:63:88", "network": {"id": "14cd88b6-4368-49ff-9977-49a60a9fadc7", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-930447636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95c9dcf3546a4d4ab45e934d09241d14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7b2aa6f-1d", "ovs_interfaceid": "f7b2aa6f-1dee-4050-84ad-2ea0e567211b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 971.979505] env[69328]: DEBUG oslo_concurrency.lockutils [None req-19c2c942-1425-475c-9f96-3dfa57ef38cb 
tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "36f6aab5-2774-402b-9db6-9912f2d5d473" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 971.979505] env[69328]: DEBUG oslo_concurrency.lockutils [None req-19c2c942-1425-475c-9f96-3dfa57ef38cb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "36f6aab5-2774-402b-9db6-9912f2d5d473" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 971.979505] env[69328]: DEBUG oslo_concurrency.lockutils [None req-19c2c942-1425-475c-9f96-3dfa57ef38cb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "36f6aab5-2774-402b-9db6-9912f2d5d473-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 971.979505] env[69328]: DEBUG oslo_concurrency.lockutils [None req-19c2c942-1425-475c-9f96-3dfa57ef38cb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "36f6aab5-2774-402b-9db6-9912f2d5d473-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 971.979505] env[69328]: DEBUG oslo_concurrency.lockutils [None req-19c2c942-1425-475c-9f96-3dfa57ef38cb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "36f6aab5-2774-402b-9db6-9912f2d5d473-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 971.983224] env[69328]: INFO nova.compute.manager [None req-19c2c942-1425-475c-9f96-3dfa57ef38cb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Terminating instance [ 972.169587] env[69328]: DEBUG nova.compute.utils [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 972.173172] env[69328]: DEBUG nova.compute.manager [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 972.173303] env[69328]: DEBUG nova.network.neutron [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 972.276346] env[69328]: DEBUG oslo_vmware.api [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Task: {'id': task-3273690, 'name': Rename_Task, 'duration_secs': 0.399779} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.278241] env[69328]: DEBUG nova.policy [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '047fba3350d249e6b48eda735fc10786', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'efd0e2d2f9ba4416bd8fd08dad912465', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 972.279343] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 972.279669] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-50d2570c-0a2a-4067-8b90-a6a82f52a71c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.296850] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273691, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.340591} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.297977] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 972.298192] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 972.298521] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 972.301934] env[69328]: DEBUG oslo_vmware.api [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Waiting for the task: (returnval){ [ 972.301934] env[69328]: value = "task-3273692" [ 972.301934] env[69328]: _type = "Task" [ 972.301934] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.372042] env[69328]: DEBUG nova.compute.manager [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Starting instance... 
{{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 972.432418] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Releasing lock "refresh_cache-772ab9b3-23ac-46c6-acb1-af0b2726fd90" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 972.433174] env[69328]: DEBUG nova.compute.manager [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Instance network_info: |[{"id": "f7b2aa6f-1dee-4050-84ad-2ea0e567211b", "address": "fa:16:3e:df:63:88", "network": {"id": "14cd88b6-4368-49ff-9977-49a60a9fadc7", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-930447636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95c9dcf3546a4d4ab45e934d09241d14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7b2aa6f-1d", "ovs_interfaceid": "f7b2aa6f-1dee-4050-84ad-2ea0e567211b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 972.433485] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:df:63:88', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78fd2e0c-4fd2-4d81-8780-aa94237670c0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f7b2aa6f-1dee-4050-84ad-2ea0e567211b', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 972.440938] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Creating folder: Project (95c9dcf3546a4d4ab45e934d09241d14). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 972.441912] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-22071881-2293-49ea-bed4-cf1e8baa62b4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.456086] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Created folder: Project (95c9dcf3546a4d4ab45e934d09241d14) in parent group-v653649. 
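Editor's note: the network_info blob logged above for instance 772ab9b3-23ac-46c6-acb1-af0b2726fd90 is a plain list of VIF dictionaries that Nova caches per instance. As a rough illustration only (not Nova's own NetworkInfo/VIF model classes), the following Python sketch walks a trimmed-down copy of that structure to pull out the port ID, MAC address, device name and fixed IPs; the dict literal is copied from the values in the log, everything else is hypothetical glue.

# Illustrative traversal of a network_info entry shaped like the one logged
# above. The literal below is a trimmed copy of the logged data, not the
# result of any API call.
network_info = [{
    "id": "f7b2aa6f-1dee-4050-84ad-2ea0e567211b",
    "address": "fa:16:3e:df:63:88",
    "devname": "tapf7b2aa6f-1d",
    "network": {
        "label": "tempest-ServerTagsTestJSON-930447636-network",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.13", "type": "fixed",
                     "floating_ips": []}],
        }],
    },
}]

for vif in network_info:
    # Collect the fixed IPs across all subnets attached to this VIF.
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]
                 if ip["type"] == "fixed"]
    print(vif["id"], vif["address"], vif["devname"], fixed_ips)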
[ 972.456213] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Creating folder: Instances. Parent ref: group-v653881. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 972.456459] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d761fdb7-418a-46f2-9bb7-6e2c6bc1d7f1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.469588] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Created folder: Instances in parent group-v653881. [ 972.469997] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 972.470302] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 972.470542] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1dbec776-2c22-4741-be1f-9a9a446c6824 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.486869] env[69328]: DEBUG nova.network.neutron [None req-57bfa0df-fcd6-4e15-9bff-8dc79759f989 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Updating instance_info_cache with network_info: [{"id": "766304d2-5559-4007-9fa4-a01027d56e49", "address": "fa:16:3e:7b:c7:c8", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap766304d2-55", "ovs_interfaceid": "766304d2-5559-4007-9fa4-a01027d56e49", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 972.489420] env[69328]: DEBUG nova.compute.manager [None req-19c2c942-1425-475c-9f96-3dfa57ef38cb tempest-AttachInterfacesTestJSON-1279775803 
tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 972.489642] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-19c2c942-1425-475c-9f96-3dfa57ef38cb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 972.490487] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b49e3f18-3d04-4b59-be3d-5952746537ff {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.502428] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 972.502428] env[69328]: value = "task-3273695" [ 972.502428] env[69328]: _type = "Task" [ 972.502428] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.502782] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-19c2c942-1425-475c-9f96-3dfa57ef38cb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 972.503819] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ba1cb196-d108-46ba-b9df-3e5e0d9b70d1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.516286] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273695, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.517222] env[69328]: DEBUG oslo_vmware.api [None req-19c2c942-1425-475c-9f96-3dfa57ef38cb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 972.517222] env[69328]: value = "task-3273696" [ 972.517222] env[69328]: _type = "Task" [ 972.517222] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.527491] env[69328]: DEBUG oslo_vmware.api [None req-19c2c942-1425-475c-9f96-3dfa57ef38cb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273696, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.573845] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9fff04c7-c0f7-43d9-9f9e-4f1b4cddc38c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "4d320c76-45bb-451c-8fbb-3dd2d64f56d5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.574174] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9fff04c7-c0f7-43d9-9f9e-4f1b4cddc38c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "4d320c76-45bb-451c-8fbb-3dd2d64f56d5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 972.574400] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9fff04c7-c0f7-43d9-9f9e-4f1b4cddc38c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "4d320c76-45bb-451c-8fbb-3dd2d64f56d5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.574635] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9fff04c7-c0f7-43d9-9f9e-4f1b4cddc38c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "4d320c76-45bb-451c-8fbb-3dd2d64f56d5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 972.574812] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9fff04c7-c0f7-43d9-9f9e-4f1b4cddc38c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "4d320c76-45bb-451c-8fbb-3dd2d64f56d5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 972.577263] env[69328]: INFO nova.compute.manager [None req-9fff04c7-c0f7-43d9-9f9e-4f1b4cddc38c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Terminating instance [ 972.641149] env[69328]: DEBUG nova.compute.manager [req-1e190240-f818-4211-9f25-5d64243f4472 req-fd615671-3be2-4963-8eae-7c7a1cc25f92 service nova] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Received event network-changed-f7b2aa6f-1dee-4050-84ad-2ea0e567211b {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 972.641345] env[69328]: DEBUG nova.compute.manager [req-1e190240-f818-4211-9f25-5d64243f4472 req-fd615671-3be2-4963-8eae-7c7a1cc25f92 service nova] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Refreshing instance network info cache due to event network-changed-f7b2aa6f-1dee-4050-84ad-2ea0e567211b. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 972.641726] env[69328]: DEBUG oslo_concurrency.lockutils [req-1e190240-f818-4211-9f25-5d64243f4472 req-fd615671-3be2-4963-8eae-7c7a1cc25f92 service nova] Acquiring lock "refresh_cache-772ab9b3-23ac-46c6-acb1-af0b2726fd90" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.642131] env[69328]: DEBUG oslo_concurrency.lockutils [req-1e190240-f818-4211-9f25-5d64243f4472 req-fd615671-3be2-4963-8eae-7c7a1cc25f92 service nova] Acquired lock "refresh_cache-772ab9b3-23ac-46c6-acb1-af0b2726fd90" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 972.643858] env[69328]: DEBUG nova.network.neutron [req-1e190240-f818-4211-9f25-5d64243f4472 req-fd615671-3be2-4963-8eae-7c7a1cc25f92 service nova] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Refreshing network info cache for port f7b2aa6f-1dee-4050-84ad-2ea0e567211b {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 972.673723] env[69328]: DEBUG nova.compute.manager [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 972.805832] env[69328]: INFO nova.virt.block_device [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Booting with volume a39fd325-7c9d-4482-b7a4-43b28bf52e5c at /dev/sdb [ 972.823862] env[69328]: DEBUG oslo_vmware.api [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Task: {'id': task-3273692, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.865470] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-32873efc-5e29-4a0e-906f-a04170b4ebf1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.881849] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b7cc26c-bed3-48b5-866e-3af09fb3a86e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.932765] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.933721] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ce7396b5-f7bd-4e7c-8371-d9f9f56b03cf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.944916] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f353876-0c2e-409b-82b0-074f6d9fd286 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.993537] env[69328]: DEBUG oslo_concurrency.lockutils [None req-57bfa0df-fcd6-4e15-9bff-8dc79759f989 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Releasing lock "refresh_cache-36f6aab5-2774-402b-9db6-9912f2d5d473" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 972.999419] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b82cc7c-bd7a-4f25-838a-3bed9ea93017 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.004153] env[69328]: DEBUG oslo_concurrency.lockutils [None req-57bfa0df-fcd6-4e15-9bff-8dc79759f989 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "interface-36f6aab5-2774-402b-9db6-9912f2d5d473-3cd046f6-ec54-42ea-acae-2410cbcf3a47" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.426s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.014810] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ae48f19-3fe2-4b4d-a440-8a225d49f884 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.021902] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273695, 'name': CreateVM_Task, 'duration_secs': 0.442615} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.029031] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 973.030786] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 973.031034] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 973.031413] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 973.032355] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73e51ccd-11bf-4feb-94e4-c7a6b11e1ca4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.037157] env[69328]: DEBUG oslo_vmware.api [None req-19c2c942-1425-475c-9f96-3dfa57ef38cb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273696, 'name': PowerOffVM_Task, 'duration_secs': 0.228664} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.037883] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-19c2c942-1425-475c-9f96-3dfa57ef38cb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 973.038086] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-19c2c942-1425-475c-9f96-3dfa57ef38cb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 973.042354] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b3aaac00-20bc-4513-8e71-d0be7644dee9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.044446] env[69328]: DEBUG nova.virt.block_device [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Updating existing volume attachment record: 8325a068-f5bd-4258-b353-fcb69a59645d {{(pid=69328) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 973.048031] env[69328]: DEBUG oslo_vmware.api [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Waiting for the task: (returnval){ [ 973.048031] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5282863c-7d88-2626-646f-96b7a122dca2" [ 973.048031] env[69328]: _type = "Task" [ 973.048031] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.063700] env[69328]: DEBUG oslo_vmware.api [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5282863c-7d88-2626-646f-96b7a122dca2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.074719] env[69328]: DEBUG nova.network.neutron [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Successfully created port: 7da3de27-ee87-400f-ae26-a3a6995a8363 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 973.082913] env[69328]: DEBUG nova.compute.manager [None req-9fff04c7-c0f7-43d9-9f9e-4f1b4cddc38c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 973.083132] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9fff04c7-c0f7-43d9-9f9e-4f1b4cddc38c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 973.084051] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-500924d9-0956-446d-829f-eb97a3d37d72 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.095457] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fff04c7-c0f7-43d9-9f9e-4f1b4cddc38c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 973.095457] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b9a5cbf0-8ae2-404c-9b2d-6020105b7452 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.104201] env[69328]: DEBUG oslo_vmware.api [None req-9fff04c7-c0f7-43d9-9f9e-4f1b4cddc38c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 973.104201] env[69328]: value = "task-3273698" [ 973.104201] env[69328]: _type = "Task" [ 973.104201] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.115106] env[69328]: DEBUG oslo_vmware.api [None req-9fff04c7-c0f7-43d9-9f9e-4f1b4cddc38c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273698, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.124702] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-19c2c942-1425-475c-9f96-3dfa57ef38cb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 973.124984] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-19c2c942-1425-475c-9f96-3dfa57ef38cb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 973.125232] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-19c2c942-1425-475c-9f96-3dfa57ef38cb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Deleting the datastore file [datastore1] 36f6aab5-2774-402b-9db6-9912f2d5d473 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 973.125510] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b256b2e1-9608-4b85-b371-e07971672f20 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.136671] env[69328]: DEBUG oslo_vmware.api [None req-19c2c942-1425-475c-9f96-3dfa57ef38cb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 973.136671] env[69328]: value = "task-3273699" [ 973.136671] env[69328]: _type = "Task" [ 973.136671] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.152950] env[69328]: DEBUG oslo_vmware.api [None req-19c2c942-1425-475c-9f96-3dfa57ef38cb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273699, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.232377] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65eff965-79cb-4e12-b997-d6b5111d19d9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.241503] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f43ee089-50c6-4823-8a94-530d025d1861 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.275898] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40d314b1-ea56-40d0-92db-607ce8c8a1bf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.284624] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6efda42-4a5e-4612-b7a8-8de2135daafe {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.300593] env[69328]: DEBUG nova.compute.provider_tree [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 973.318526] env[69328]: DEBUG oslo_vmware.api [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Task: {'id': task-3273692, 'name': PowerOnVM_Task, 'duration_secs': 0.572098} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.318526] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 973.318755] env[69328]: INFO nova.compute.manager [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Took 8.68 seconds to spawn the instance on the hypervisor. 
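Editor's note: the recurring "Waiting for the task: (returnval){...}", "progress is N%" and "completed successfully" entries in this section come from oslo.vmware's task polling around asynchronous vSphere calls (ReconfigVM_Task, Rename_Task, PowerOnVM_Task, DeleteDatastoreFile_Task, and so on). Below is a minimal sketch of that invoke-then-wait pattern, assuming an already-created oslo_vmware.api.VMwareAPISession; vm_ref and reconfig_spec are hypothetical placeholders, and the real call sites live in nova.virt.vmwareapi.

def reconfigure_vm(session, vm_ref, reconfig_spec):
    """Issue ReconfigVM_Task and block until vCenter reports completion.

    Sketch only: `session` is assumed to be an oslo_vmware.api.VMwareAPISession.
    """
    # invoke_api() returns a vSphere task reference immediately; the work
    # runs asynchronously on the vCenter side.
    task = session.invoke_api(session.vim, "ReconfigVM_Task",
                              vm_ref, spec=reconfig_spec)
    # wait_for_task() polls that task reference, which is what produces the
    # "Task: {...} progress is N%" and "completed successfully" log entries.
    return session.wait_for_task(task)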
[ 973.318997] env[69328]: DEBUG nova.compute.manager [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 973.320484] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec69cbff-20d8-46a8-9e95-1e0ff3f817f2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.656952] env[69328]: DEBUG oslo_vmware.api [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5282863c-7d88-2626-646f-96b7a122dca2, 'name': SearchDatastore_Task, 'duration_secs': 0.023482} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.656952] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 973.656952] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 973.656952] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 973.656952] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 973.656952] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 973.656952] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-90d2d338-3dde-486d-9b7f-8c14ba4e9132 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.656952] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] 
Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 973.656952] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 973.656952] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c2e9f6b-7b13-4f9b-888f-c398be721a20 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.656952] env[69328]: DEBUG oslo_vmware.api [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Waiting for the task: (returnval){ [ 973.656952] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52104d3d-dde1-3fec-50bd-4d0371ff2749" [ 973.656952] env[69328]: _type = "Task" [ 973.656952] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.656952] env[69328]: DEBUG oslo_vmware.api [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52104d3d-dde1-3fec-50bd-4d0371ff2749, 'name': SearchDatastore_Task, 'duration_secs': 0.012448} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.656952] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd7ffdbc-fcfb-4ffa-b163-106bccfaa94c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.656952] env[69328]: DEBUG nova.network.neutron [req-1e190240-f818-4211-9f25-5d64243f4472 req-fd615671-3be2-4963-8eae-7c7a1cc25f92 service nova] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Updated VIF entry in instance network info cache for port f7b2aa6f-1dee-4050-84ad-2ea0e567211b. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 973.659036] env[69328]: DEBUG nova.network.neutron [req-1e190240-f818-4211-9f25-5d64243f4472 req-fd615671-3be2-4963-8eae-7c7a1cc25f92 service nova] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Updating instance_info_cache with network_info: [{"id": "f7b2aa6f-1dee-4050-84ad-2ea0e567211b", "address": "fa:16:3e:df:63:88", "network": {"id": "14cd88b6-4368-49ff-9977-49a60a9fadc7", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-930447636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95c9dcf3546a4d4ab45e934d09241d14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7b2aa6f-1d", "ovs_interfaceid": "f7b2aa6f-1dee-4050-84ad-2ea0e567211b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.659036] env[69328]: DEBUG oslo_vmware.api [None req-9fff04c7-c0f7-43d9-9f9e-4f1b4cddc38c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273698, 'name': PowerOffVM_Task, 'duration_secs': 0.229833} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.659036] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fff04c7-c0f7-43d9-9f9e-4f1b4cddc38c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 973.659036] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9fff04c7-c0f7-43d9-9f9e-4f1b4cddc38c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 973.659036] env[69328]: DEBUG oslo_vmware.api [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Waiting for the task: (returnval){ [ 973.659036] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52720448-6380-106c-12ff-a1a151c67e20" [ 973.659036] env[69328]: _type = "Task" [ 973.659036] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.659036] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-72252e7d-04fa-405a-9e09-f6931488d9aa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.659036] env[69328]: DEBUG oslo_vmware.api [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52720448-6380-106c-12ff-a1a151c67e20, 'name': SearchDatastore_Task, 'duration_secs': 0.013595} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.659036] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 973.659036] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 772ab9b3-23ac-46c6-acb1-af0b2726fd90/772ab9b3-23ac-46c6-acb1-af0b2726fd90.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 973.659036] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6fade33b-0af9-4eaa-94f1-d06a0b3da716 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.661109] env[69328]: DEBUG oslo_vmware.api [None req-19c2c942-1425-475c-9f96-3dfa57ef38cb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273699, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.32511} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.664418] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-19c2c942-1425-475c-9f96-3dfa57ef38cb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 973.664418] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-19c2c942-1425-475c-9f96-3dfa57ef38cb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 973.664418] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-19c2c942-1425-475c-9f96-3dfa57ef38cb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 973.664418] env[69328]: INFO nova.compute.manager [None req-19c2c942-1425-475c-9f96-3dfa57ef38cb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Took 1.17 seconds to destroy the instance on the hypervisor. [ 973.664418] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-19c2c942-1425-475c-9f96-3dfa57ef38cb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 973.665080] env[69328]: DEBUG oslo_vmware.api [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Waiting for the task: (returnval){ [ 973.665080] env[69328]: value = "task-3273701" [ 973.665080] env[69328]: _type = "Task" [ 973.665080] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.665490] env[69328]: DEBUG nova.compute.manager [-] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 973.665884] env[69328]: DEBUG nova.network.neutron [-] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 973.685193] env[69328]: DEBUG oslo_vmware.api [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Task: {'id': task-3273701, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.691277] env[69328]: DEBUG nova.compute.manager [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 973.712981] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9fff04c7-c0f7-43d9-9f9e-4f1b4cddc38c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 973.713253] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9fff04c7-c0f7-43d9-9f9e-4f1b4cddc38c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 973.713442] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fff04c7-c0f7-43d9-9f9e-4f1b4cddc38c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Deleting the datastore file [datastore2] 4d320c76-45bb-451c-8fbb-3dd2d64f56d5 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 973.713720] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-75dba8e5-5a62-4a14-b336-5813d9927ce0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.724794] env[69328]: DEBUG oslo_vmware.api [None req-9fff04c7-c0f7-43d9-9f9e-4f1b4cddc38c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 973.724794] env[69328]: value = "task-3273702" [ 973.724794] env[69328]: _type = "Task" [ 973.724794] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.731753] env[69328]: DEBUG nova.virt.hardware [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 973.731753] env[69328]: DEBUG nova.virt.hardware [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 973.731753] env[69328]: DEBUG nova.virt.hardware [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 973.731927] env[69328]: DEBUG nova.virt.hardware [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 973.732639] env[69328]: DEBUG nova.virt.hardware [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 973.732639] env[69328]: DEBUG nova.virt.hardware [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 973.732639] env[69328]: DEBUG nova.virt.hardware [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 973.732823] env[69328]: DEBUG nova.virt.hardware [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 973.733035] env[69328]: DEBUG nova.virt.hardware [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 973.733223] env[69328]: DEBUG nova.virt.hardware [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 973.733390] env[69328]: DEBUG nova.virt.hardware [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 973.734590] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74aa6d41-1efb-4507-8bcc-141aef9bb478 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.742972] env[69328]: DEBUG oslo_vmware.api [None req-9fff04c7-c0f7-43d9-9f9e-4f1b4cddc38c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273702, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.746321] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f935e7ef-edf0-43e6-b43d-a07f2d550977 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.807742] env[69328]: DEBUG nova.scheduler.client.report [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 973.843132] env[69328]: INFO nova.compute.manager [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Took 20.38 seconds to build instance. 
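The nova.virt.hardware lines above walk from the flavor and image limits (no preferences, maxima of 65536 sockets/cores/threads) to "Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies" and the single VirtCPUTopology(cores=1,sockets=1,threads=1). A simplified sketch of that enumeration step follows: list every cores/sockets/threads split whose product equals the vCPU count and respects the limits. This only illustrates the idea; the real code additionally ranks the candidates against the preferred topology.

```python
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "cores sockets threads")


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate cores/sockets/threads splits whose product equals `vcpus`.

    Simplified version of the step logged as "Build topologies for N vcpu(s)"
    followed by "Got N possible topologies".
    """
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                found.append(VirtCPUTopology(cores=cores, sockets=sockets,
                                             threads=threads))
    return found


# m1.nano in this log has vcpus=1, hence exactly one candidate topology.
print(possible_topologies(1))  # [VirtCPUTopology(cores=1, sockets=1, threads=1)]
```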
[ 974.123635] env[69328]: DEBUG oslo_concurrency.lockutils [req-1e190240-f818-4211-9f25-5d64243f4472 req-fd615671-3be2-4963-8eae-7c7a1cc25f92 service nova] Releasing lock "refresh_cache-772ab9b3-23ac-46c6-acb1-af0b2726fd90" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 974.184810] env[69328]: DEBUG oslo_vmware.api [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Task: {'id': task-3273701, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.240549] env[69328]: DEBUG oslo_vmware.api [None req-9fff04c7-c0f7-43d9-9f9e-4f1b4cddc38c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273702, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.371611} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.241021] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fff04c7-c0f7-43d9-9f9e-4f1b4cddc38c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 974.241931] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9fff04c7-c0f7-43d9-9f9e-4f1b4cddc38c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 974.241931] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9fff04c7-c0f7-43d9-9f9e-4f1b4cddc38c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 974.242092] env[69328]: INFO nova.compute.manager [None req-9fff04c7-c0f7-43d9-9f9e-4f1b4cddc38c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Took 1.16 seconds to destroy the instance on the hypervisor. [ 974.242314] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9fff04c7-c0f7-43d9-9f9e-4f1b4cddc38c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 974.242889] env[69328]: DEBUG nova.compute.manager [-] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 974.243010] env[69328]: DEBUG nova.network.neutron [-] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 974.315033] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.655s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 974.315033] env[69328]: DEBUG nova.compute.manager [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 974.319918] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.845s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 974.322883] env[69328]: INFO nova.compute.claims [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 974.347291] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bb85fa95-b3d7-49d0-9a32-1c66b96941fb tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Lock "7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.897s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 974.684611] env[69328]: DEBUG oslo_vmware.api [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Task: {'id': task-3273701, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.735676} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.684611] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 772ab9b3-23ac-46c6-acb1-af0b2726fd90/772ab9b3-23ac-46c6-acb1-af0b2726fd90.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 974.684611] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 974.684861] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c98bf8a7-1894-4ffc-816c-bfaf311d29e5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.694043] env[69328]: DEBUG oslo_vmware.api [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Waiting for the task: (returnval){ [ 974.694043] env[69328]: value = "task-3273703" [ 974.694043] env[69328]: _type = "Task" [ 974.694043] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.705903] env[69328]: DEBUG oslo_vmware.api [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Task: {'id': task-3273703, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.710295] env[69328]: DEBUG nova.compute.manager [req-9d8cbedc-81d1-4a4e-b939-56dcc61c8286 req-e39f4703-c546-467c-afca-744f700616b8 service nova] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Received event network-vif-deleted-766304d2-5559-4007-9fa4-a01027d56e49 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 974.710565] env[69328]: INFO nova.compute.manager [req-9d8cbedc-81d1-4a4e-b939-56dcc61c8286 req-e39f4703-c546-467c-afca-744f700616b8 service nova] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Neutron deleted interface 766304d2-5559-4007-9fa4-a01027d56e49; detaching it from the instance and deleting it from the info cache [ 974.712063] env[69328]: DEBUG nova.network.neutron [req-9d8cbedc-81d1-4a4e-b939-56dcc61c8286 req-e39f4703-c546-467c-afca-744f700616b8 service nova] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 974.829320] env[69328]: DEBUG nova.compute.utils [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 974.835215] env[69328]: DEBUG nova.compute.manager [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 974.835215] env[69328]: DEBUG nova.network.neutron [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 974.850931] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Acquiring lock "65fccb3f-5e0e-4140-be0a-5ba20f494d50" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 974.851210] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Lock "65fccb3f-5e0e-4140-be0a-5ba20f494d50" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 974.925200] env[69328]: DEBUG nova.policy [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '19265c910cd04814978013416bf2a18a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '636412f89c9d488a9cfd6f19ef046efc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 
'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 974.932332] env[69328]: DEBUG nova.network.neutron [-] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.039166] env[69328]: DEBUG nova.compute.manager [req-07aba7f7-a7e0-48fa-8cbd-193555eaa2bf req-1a26cc63-4b65-4011-9114-48284bfd85c8 service nova] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Received event network-vif-plugged-7da3de27-ee87-400f-ae26-a3a6995a8363 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 975.039166] env[69328]: DEBUG oslo_concurrency.lockutils [req-07aba7f7-a7e0-48fa-8cbd-193555eaa2bf req-1a26cc63-4b65-4011-9114-48284bfd85c8 service nova] Acquiring lock "52c87371-4142-40d6-ac68-804aabd9f823-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 975.039166] env[69328]: DEBUG oslo_concurrency.lockutils [req-07aba7f7-a7e0-48fa-8cbd-193555eaa2bf req-1a26cc63-4b65-4011-9114-48284bfd85c8 service nova] Lock "52c87371-4142-40d6-ac68-804aabd9f823-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 975.039166] env[69328]: DEBUG oslo_concurrency.lockutils [req-07aba7f7-a7e0-48fa-8cbd-193555eaa2bf req-1a26cc63-4b65-4011-9114-48284bfd85c8 service nova] Lock "52c87371-4142-40d6-ac68-804aabd9f823-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 975.039719] env[69328]: DEBUG nova.compute.manager [req-07aba7f7-a7e0-48fa-8cbd-193555eaa2bf req-1a26cc63-4b65-4011-9114-48284bfd85c8 service nova] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] No waiting events found dispatching network-vif-plugged-7da3de27-ee87-400f-ae26-a3a6995a8363 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 975.040084] env[69328]: WARNING nova.compute.manager [req-07aba7f7-a7e0-48fa-8cbd-193555eaa2bf req-1a26cc63-4b65-4011-9114-48284bfd85c8 service nova] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Received unexpected event network-vif-plugged-7da3de27-ee87-400f-ae26-a3a6995a8363 for instance with vm_state building and task_state spawning. [ 975.135265] env[69328]: DEBUG nova.network.neutron [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Successfully updated port: 7da3de27-ee87-400f-ae26-a3a6995a8363 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 975.212746] env[69328]: DEBUG oslo_vmware.api [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Task: {'id': task-3273703, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076816} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.212746] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 975.212746] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c898bde-2503-4f8b-b50a-4cd9736c67e8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.976494] env[69328]: DEBUG nova.network.neutron [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Successfully created port: f7762174-4741-45a8-8a0e-8c4624ad29f6 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 975.981218] env[69328]: DEBUG nova.compute.manager [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 975.987130] env[69328]: DEBUG nova.compute.manager [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 975.988552] env[69328]: INFO nova.compute.manager [-] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Took 2.32 seconds to deallocate network for instance. 
[ 975.988750] env[69328]: DEBUG nova.network.neutron [-] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.989820] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquiring lock "refresh_cache-52c87371-4142-40d6-ac68-804aabd9f823" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.989943] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquired lock "refresh_cache-52c87371-4142-40d6-ac68-804aabd9f823" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 975.990090] env[69328]: DEBUG nova.network.neutron [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 975.991317] env[69328]: DEBUG oslo_concurrency.lockutils [None req-dfb65d93-8fb4-40e0-8193-169d3590a0ed tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Acquiring lock "7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 975.991520] env[69328]: DEBUG oslo_concurrency.lockutils [None req-dfb65d93-8fb4-40e0-8193-169d3590a0ed tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Lock "7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 975.991729] env[69328]: DEBUG oslo_concurrency.lockutils [None req-dfb65d93-8fb4-40e0-8193-169d3590a0ed tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Acquiring lock "7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 975.991973] env[69328]: DEBUG oslo_concurrency.lockutils [None req-dfb65d93-8fb4-40e0-8193-169d3590a0ed tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Lock "7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 975.992087] env[69328]: DEBUG oslo_concurrency.lockutils [None req-dfb65d93-8fb4-40e0-8193-169d3590a0ed tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Lock 
"7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 975.994374] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-162c44f9-341f-4967-bdc1-a1ecef41744e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.998440] env[69328]: INFO nova.compute.manager [None req-dfb65d93-8fb4-40e0-8193-169d3590a0ed tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Terminating instance [ 976.014601] env[69328]: DEBUG nova.virt.hardware [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 976.014821] env[69328]: DEBUG nova.virt.hardware [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 976.014970] env[69328]: DEBUG nova.virt.hardware [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 976.015160] env[69328]: DEBUG nova.virt.hardware [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 976.015297] env[69328]: DEBUG nova.virt.hardware [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 976.015435] env[69328]: DEBUG nova.virt.hardware [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 976.015643] env[69328]: DEBUG nova.virt.hardware [None 
req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 976.015796] env[69328]: DEBUG nova.virt.hardware [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 976.015951] env[69328]: DEBUG nova.virt.hardware [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 976.016115] env[69328]: DEBUG nova.virt.hardware [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 976.016282] env[69328]: DEBUG nova.virt.hardware [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 976.025783] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] 772ab9b3-23ac-46c6-acb1-af0b2726fd90/772ab9b3-23ac-46c6-acb1-af0b2726fd90.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 976.027308] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37021392-d37f-46df-89a2-c16ec539e016 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.034630] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9849034f-69a2-454d-b834-78680c1ebbcd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.053798] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eda4f5d2-34e5-4b4e-ba2d-c85a9d34321e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.073640] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d5e4837-ea8f-49f8-a07d-bfa61e40458a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.082271] env[69328]: DEBUG oslo_vmware.api [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Waiting for the task: (returnval){ [ 976.082271] env[69328]: 
value = "task-3273704" [ 976.082271] env[69328]: _type = "Task" [ 976.082271] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.099400] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:96:60:e7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1768af3d-3317-4ef5-b484-0c2707d63de7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6fa9c0fb-f285-4d44-8824-09041fd2f8f6', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 976.107853] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 976.123708] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 976.124155] env[69328]: DEBUG nova.compute.manager [req-9d8cbedc-81d1-4a4e-b939-56dcc61c8286 req-e39f4703-c546-467c-afca-744f700616b8 service nova] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Detach interface failed, port_id=766304d2-5559-4007-9fa4-a01027d56e49, reason: Instance 36f6aab5-2774-402b-9db6-9912f2d5d473 could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 976.130019] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3505ccd6-e9d4-4d5c-a093-3b8836a2dfed {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.145545] env[69328]: DEBUG oslo_vmware.api [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Task: {'id': task-3273704, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.152730] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 976.152730] env[69328]: value = "task-3273705" [ 976.152730] env[69328]: _type = "Task" [ 976.152730] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.168882] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273705, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.497899] env[69328]: INFO nova.compute.manager [-] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Took 2.25 seconds to deallocate network for instance. 
[ 976.500635] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa14beb4-d4a8-4f72-ba6b-a302b060d0d8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.514464] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e2c2dba-202f-43ea-9ffe-b117eba56f84 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.521495] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 976.547085] env[69328]: DEBUG oslo_concurrency.lockutils [None req-19c2c942-1425-475c-9f96-3dfa57ef38cb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 976.547811] env[69328]: DEBUG nova.compute.manager [None req-dfb65d93-8fb4-40e0-8193-169d3590a0ed tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 976.549292] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-dfb65d93-8fb4-40e0-8193-169d3590a0ed tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 976.550296] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-809c643d-8a16-40a5-bbe3-35fcbfaa560c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.553658] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37661b95-89ec-4eeb-95cc-44360e2595fb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.566021] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfb65d93-8fb4-40e0-8193-169d3590a0ed tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 976.566021] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f20e479-0571-4080-8184-ffda4c42cad1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.570973] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fa632049-a73c-4fdc-8d33-a2a813f7d296 {{(pid=69328) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.587651] env[69328]: DEBUG nova.compute.provider_tree [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 976.594360] env[69328]: DEBUG oslo_vmware.api [None req-dfb65d93-8fb4-40e0-8193-169d3590a0ed tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Waiting for the task: (returnval){ [ 976.594360] env[69328]: value = "task-3273706" [ 976.594360] env[69328]: _type = "Task" [ 976.594360] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.605134] env[69328]: DEBUG oslo_vmware.api [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Task: {'id': task-3273704, 'name': ReconfigVM_Task, 'duration_secs': 0.337926} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.605909] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Reconfigured VM instance instance-00000055 to attach disk [datastore1] 772ab9b3-23ac-46c6-acb1-af0b2726fd90/772ab9b3-23ac-46c6-acb1-af0b2726fd90.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 976.607013] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-31c7d043-4fc8-4d6c-916c-a3c2888df21e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.614667] env[69328]: DEBUG oslo_vmware.api [None req-dfb65d93-8fb4-40e0-8193-169d3590a0ed tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Task: {'id': task-3273706, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.620999] env[69328]: DEBUG oslo_vmware.api [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Waiting for the task: (returnval){ [ 976.620999] env[69328]: value = "task-3273707" [ 976.620999] env[69328]: _type = "Task" [ 976.620999] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.631129] env[69328]: DEBUG oslo_vmware.api [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Task: {'id': task-3273707, 'name': Rename_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.634197] env[69328]: DEBUG nova.network.neutron [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 976.665616] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273705, 'name': CreateVM_Task, 'duration_secs': 0.483605} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.665616] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 976.665962] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.666126] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 976.666500] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 976.666781] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76f3a70c-60b2-4d1b-bebf-84aade6e9212 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.672822] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 976.672822] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]521cbb7a-749a-934b-874f-ef873ce45107" [ 976.672822] env[69328]: _type = "Task" [ 976.672822] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.683146] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]521cbb7a-749a-934b-874f-ef873ce45107, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.780930] env[69328]: DEBUG nova.compute.manager [req-575e8546-d3d7-49e2-b8df-129898ffec91 req-8b1ed265-bf35-4013-b11c-0b7c3d4b0fff service nova] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Received event network-vif-deleted-f11b7e60-0d64-4eba-a305-c8a67f80d4b8 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 976.839116] env[69328]: DEBUG nova.network.neutron [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Updating instance_info_cache with network_info: [{"id": "7da3de27-ee87-400f-ae26-a3a6995a8363", "address": "fa:16:3e:91:9b:b5", "network": {"id": "bd41ac10-1850-45a2-8e46-6ebdca3d8e13", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-766393176-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efd0e2d2f9ba4416bd8fd08dad912465", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7da3de27-ee", "ovs_interfaceid": "7da3de27-ee87-400f-ae26-a3a6995a8363", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.007976] env[69328]: DEBUG nova.compute.manager [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 977.015666] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9fff04c7-c0f7-43d9-9f9e-4f1b4cddc38c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 977.035639] env[69328]: DEBUG nova.virt.hardware [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='bf58d87db36a91ddb38fd9ff8db045c4',container_format='bare',created_at=2025-04-03T17:41:38Z,direct_url=,disk_format='vmdk',id=44b9c7fb-3d5c-4708-a4ae-a8a2aba3a7ca,min_disk=1,min_ram=0,name='tempest-test-snap-1840107154',owner='636412f89c9d488a9cfd6f19ef046efc',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2025-04-03T17:41:52Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 977.035938] env[69328]: DEBUG nova.virt.hardware [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 977.036154] env[69328]: DEBUG nova.virt.hardware [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 977.036377] env[69328]: DEBUG nova.virt.hardware [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 977.036546] env[69328]: DEBUG nova.virt.hardware [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 977.036718] env[69328]: DEBUG nova.virt.hardware [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 977.036934] env[69328]: DEBUG nova.virt.hardware [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 977.037109] env[69328]: DEBUG nova.virt.hardware [None 
req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 977.037272] env[69328]: DEBUG nova.virt.hardware [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 977.037431] env[69328]: DEBUG nova.virt.hardware [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 977.037602] env[69328]: DEBUG nova.virt.hardware [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 977.038481] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bc5a95d-26de-457b-b169-d972afed78f3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.048160] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b46f30a5-2f9c-439e-8877-77a26cee2292 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.069373] env[69328]: DEBUG nova.compute.manager [req-8dbd202c-de4d-4b65-af30-07e7f1f9a780 req-31ae93a5-8d7c-4b45-b19e-f72dbb0c9516 service nova] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Received event network-changed-7da3de27-ee87-400f-ae26-a3a6995a8363 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 977.069567] env[69328]: DEBUG nova.compute.manager [req-8dbd202c-de4d-4b65-af30-07e7f1f9a780 req-31ae93a5-8d7c-4b45-b19e-f72dbb0c9516 service nova] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Refreshing instance network info cache due to event network-changed-7da3de27-ee87-400f-ae26-a3a6995a8363. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 977.069814] env[69328]: DEBUG oslo_concurrency.lockutils [req-8dbd202c-de4d-4b65-af30-07e7f1f9a780 req-31ae93a5-8d7c-4b45-b19e-f72dbb0c9516 service nova] Acquiring lock "refresh_cache-52c87371-4142-40d6-ac68-804aabd9f823" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 977.096471] env[69328]: DEBUG nova.scheduler.client.report [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 977.109436] env[69328]: DEBUG oslo_vmware.api [None req-dfb65d93-8fb4-40e0-8193-169d3590a0ed tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Task: {'id': task-3273706, 'name': PowerOffVM_Task, 'duration_secs': 0.244132} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.109724] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfb65d93-8fb4-40e0-8193-169d3590a0ed tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 977.109889] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-dfb65d93-8fb4-40e0-8193-169d3590a0ed tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 977.110155] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a1d96aaf-f3a9-4efa-afa0-12e9e6616a28 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.130628] env[69328]: DEBUG oslo_vmware.api [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Task: {'id': task-3273707, 'name': Rename_Task, 'duration_secs': 0.162682} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.130903] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 977.131164] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-10f9b3a7-4a2f-4921-8113-5dc485b31038 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.138072] env[69328]: DEBUG oslo_vmware.api [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Waiting for the task: (returnval){ [ 977.138072] env[69328]: value = "task-3273709" [ 977.138072] env[69328]: _type = "Task" [ 977.138072] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.147714] env[69328]: DEBUG oslo_vmware.api [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Task: {'id': task-3273709, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.186257] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]521cbb7a-749a-934b-874f-ef873ce45107, 'name': SearchDatastore_Task, 'duration_secs': 0.012056} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.187819] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 977.187990] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 977.188255] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 977.188403] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 977.188581] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 977.188873] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-dfb65d93-8fb4-40e0-8193-169d3590a0ed tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 977.189056] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-dfb65d93-8fb4-40e0-8193-169d3590a0ed tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 977.189284] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfb65d93-8fb4-40e0-8193-169d3590a0ed tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Deleting the datastore file [datastore1] 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 977.189456] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3b1f3e37-2481-441e-96c5-6576a588a1b1 {{(pid=69328) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.191691] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a4d855d2-1c68-4fbb-8d2a-cadf4fdfe239 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.200627] env[69328]: DEBUG oslo_vmware.api [None req-dfb65d93-8fb4-40e0-8193-169d3590a0ed tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Waiting for the task: (returnval){ [ 977.200627] env[69328]: value = "task-3273710" [ 977.200627] env[69328]: _type = "Task" [ 977.200627] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.207395] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 977.207609] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 977.208820] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98925622-9e7d-4da0-9afe-76b881602ac8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.214939] env[69328]: DEBUG oslo_vmware.api [None req-dfb65d93-8fb4-40e0-8193-169d3590a0ed tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Task: {'id': task-3273710, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.216393] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 977.216393] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f84d60-c521-3351-6a4b-442bb9f46144" [ 977.216393] env[69328]: _type = "Task" [ 977.216393] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.227038] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f84d60-c521-3351-6a4b-442bb9f46144, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.344633] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Releasing lock "refresh_cache-52c87371-4142-40d6-ac68-804aabd9f823" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 977.345154] env[69328]: DEBUG nova.compute.manager [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Instance network_info: |[{"id": "7da3de27-ee87-400f-ae26-a3a6995a8363", "address": "fa:16:3e:91:9b:b5", "network": {"id": "bd41ac10-1850-45a2-8e46-6ebdca3d8e13", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-766393176-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efd0e2d2f9ba4416bd8fd08dad912465", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7da3de27-ee", "ovs_interfaceid": "7da3de27-ee87-400f-ae26-a3a6995a8363", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 977.345543] env[69328]: DEBUG oslo_concurrency.lockutils [req-8dbd202c-de4d-4b65-af30-07e7f1f9a780 req-31ae93a5-8d7c-4b45-b19e-f72dbb0c9516 service nova] Acquired lock "refresh_cache-52c87371-4142-40d6-ac68-804aabd9f823" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 977.345758] env[69328]: DEBUG nova.network.neutron [req-8dbd202c-de4d-4b65-af30-07e7f1f9a780 req-31ae93a5-8d7c-4b45-b19e-f72dbb0c9516 service nova] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Refreshing network info cache for port 7da3de27-ee87-400f-ae26-a3a6995a8363 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 977.347222] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:91:9b:b5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2e614f8e-6b11-4b6b-a421-904bca6acd91', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7da3de27-ee87-400f-ae26-a3a6995a8363', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 977.354802] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a8999829-2087-4c41-b085-d982876cb91b 
tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 977.357761] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 977.358327] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aa2c14d1-810b-436f-9b20-7af6d5562b44 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.378724] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 977.378724] env[69328]: value = "task-3273711" [ 977.378724] env[69328]: _type = "Task" [ 977.378724] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.387146] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273711, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.604570] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.285s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 977.605150] env[69328]: DEBUG nova.compute.manager [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 977.610118] env[69328]: DEBUG oslo_concurrency.lockutils [None req-be4368cc-235b-4293-8984-4bd2a2f178dd tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.664s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 977.610403] env[69328]: DEBUG nova.objects.instance [None req-be4368cc-235b-4293-8984-4bd2a2f178dd tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lazy-loading 'resources' on Instance uuid 9ad2b2e3-460a-403e-bfc7-f46648c93849 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 977.627538] env[69328]: DEBUG nova.network.neutron [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Successfully updated port: f7762174-4741-45a8-8a0e-8c4624ad29f6 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 977.652017] env[69328]: DEBUG oslo_vmware.api [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Task: {'id': task-3273709, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.662347] env[69328]: DEBUG nova.network.neutron [req-8dbd202c-de4d-4b65-af30-07e7f1f9a780 req-31ae93a5-8d7c-4b45-b19e-f72dbb0c9516 service nova] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Updated VIF entry in instance network info cache for port 7da3de27-ee87-400f-ae26-a3a6995a8363. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 977.662669] env[69328]: DEBUG nova.network.neutron [req-8dbd202c-de4d-4b65-af30-07e7f1f9a780 req-31ae93a5-8d7c-4b45-b19e-f72dbb0c9516 service nova] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Updating instance_info_cache with network_info: [{"id": "7da3de27-ee87-400f-ae26-a3a6995a8363", "address": "fa:16:3e:91:9b:b5", "network": {"id": "bd41ac10-1850-45a2-8e46-6ebdca3d8e13", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-766393176-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efd0e2d2f9ba4416bd8fd08dad912465", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7da3de27-ee", "ovs_interfaceid": "7da3de27-ee87-400f-ae26-a3a6995a8363", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.711502] env[69328]: DEBUG oslo_vmware.api [None req-dfb65d93-8fb4-40e0-8193-169d3590a0ed tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Task: {'id': task-3273710, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146796} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.711912] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfb65d93-8fb4-40e0-8193-169d3590a0ed tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 977.712168] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-dfb65d93-8fb4-40e0-8193-169d3590a0ed tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 977.712804] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-dfb65d93-8fb4-40e0-8193-169d3590a0ed tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 977.712804] env[69328]: INFO nova.compute.manager [None req-dfb65d93-8fb4-40e0-8193-169d3590a0ed tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Took 1.16 seconds to destroy the instance on the hypervisor. [ 977.712943] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dfb65d93-8fb4-40e0-8193-169d3590a0ed tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 977.713113] env[69328]: DEBUG nova.compute.manager [-] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 977.713353] env[69328]: DEBUG nova.network.neutron [-] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 977.727638] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f84d60-c521-3351-6a4b-442bb9f46144, 'name': SearchDatastore_Task, 'duration_secs': 0.01018} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.728476] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a09b46c4-76ba-498f-b5c6-c8cdcfe155d2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.739773] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 977.739773] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e76b4b-7811-81ac-d21a-b6dcdfaeddf0" [ 977.739773] env[69328]: _type = "Task" [ 977.739773] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.748451] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e76b4b-7811-81ac-d21a-b6dcdfaeddf0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.889212] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273711, 'name': CreateVM_Task, 'duration_secs': 0.438672} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.889212] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 977.889550] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 977.891897] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 977.891897] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 977.891897] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9967e26c-5f4a-49b8-8c69-f7a4e54d3bcb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.895623] env[69328]: DEBUG oslo_vmware.api [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 
tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 977.895623] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]521df494-64cf-445d-f72e-c2e7b7cfadf2" [ 977.895623] env[69328]: _type = "Task" [ 977.895623] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.904954] env[69328]: DEBUG oslo_vmware.api [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]521df494-64cf-445d-f72e-c2e7b7cfadf2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.118555] env[69328]: DEBUG nova.compute.utils [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 978.119890] env[69328]: DEBUG nova.compute.manager [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 978.120117] env[69328]: DEBUG nova.network.neutron [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 978.127621] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "refresh_cache-51a9c492-6f91-4186-b550-ef12284b8a84" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 978.127621] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquired lock "refresh_cache-51a9c492-6f91-4186-b550-ef12284b8a84" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 978.127847] env[69328]: DEBUG nova.network.neutron [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 978.150793] env[69328]: DEBUG oslo_vmware.api [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Task: {'id': task-3273709, 'name': PowerOnVM_Task, 'duration_secs': 0.706883} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.154840] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 978.154840] env[69328]: INFO nova.compute.manager [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Took 8.98 seconds to spawn the instance on the hypervisor. [ 978.154840] env[69328]: DEBUG nova.compute.manager [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 978.155436] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59b6bece-2e1f-4f21-9f7a-30ccd7a9a4e3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.173180] env[69328]: DEBUG nova.policy [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '43be625728f24af5a2f6a650279d689d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cdc479a290524130b9d17e627a64b65a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 978.174789] env[69328]: DEBUG oslo_concurrency.lockutils [req-8dbd202c-de4d-4b65-af30-07e7f1f9a780 req-31ae93a5-8d7c-4b45-b19e-f72dbb0c9516 service nova] Releasing lock "refresh_cache-52c87371-4142-40d6-ac68-804aabd9f823" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 978.256123] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e76b4b-7811-81ac-d21a-b6dcdfaeddf0, 'name': SearchDatastore_Task, 'duration_secs': 0.014628} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.259271] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 978.259832] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 3ba646e8-a5c8-4917-a1c4-32b37affb598/3ba646e8-a5c8-4917-a1c4-32b37affb598.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 978.261393] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-53dc2c53-e8eb-43a9-b276-70e77ea6efbc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.268934] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 978.268934] env[69328]: value = "task-3273712" [ 978.268934] env[69328]: _type = "Task" [ 978.268934] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.281285] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273712, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.409509] env[69328]: DEBUG oslo_vmware.api [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]521df494-64cf-445d-f72e-c2e7b7cfadf2, 'name': SearchDatastore_Task, 'duration_secs': 0.011389} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.409893] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 978.410284] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 978.410585] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 978.410810] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 978.411014] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 978.411315] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bbe524b8-3a55-498b-a5b1-cb7a019d6146 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.424206] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 978.424406] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 978.425232] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b8a7618-0694-48f0-a309-45da0571c5f6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.434566] env[69328]: DEBUG oslo_vmware.api [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 978.434566] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]522c2ea0-16b7-7432-a00f-871065a4de77" [ 978.434566] env[69328]: _type = "Task" [ 978.434566] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.444704] env[69328]: DEBUG oslo_vmware.api [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]522c2ea0-16b7-7432-a00f-871065a4de77, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.487674] env[69328]: DEBUG nova.network.neutron [-] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 978.533436] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4940bd2-cbe1-4119-916a-c96f82327c72 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.543205] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce9cf58d-2ac5-485b-a24b-66fdcb4a593e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.578640] env[69328]: DEBUG nova.network.neutron [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Successfully created port: 3d319ee9-4b9f-43cd-b96e-f3b35e34ec76 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 978.581992] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fff4f020-808a-4d2c-9c89-8f501c4caee9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.591468] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c84850ed-9a03-4380-9169-c99c48387f4c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.607472] env[69328]: DEBUG nova.compute.provider_tree [None req-be4368cc-235b-4293-8984-4bd2a2f178dd tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 978.623407] env[69328]: DEBUG nova.compute.manager [None 
req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 978.680302] env[69328]: DEBUG nova.network.neutron [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 978.690733] env[69328]: INFO nova.compute.manager [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Took 21.67 seconds to build instance. [ 978.782679] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273712, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.807901] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Acquiring lock "a0952fdf-5570-4112-bc4d-e9f9cee1599c" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 978.807901] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Lock "a0952fdf-5570-4112-bc4d-e9f9cee1599c" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 978.807901] env[69328]: INFO nova.compute.manager [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Shelving [ 978.898399] env[69328]: DEBUG nova.network.neutron [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Updating instance_info_cache with network_info: [{"id": "f7762174-4741-45a8-8a0e-8c4624ad29f6", "address": "fa:16:3e:15:2b:82", "network": {"id": "5b6ce910-5dc1-49bb-ad4b-fe1a86509fc3", "bridge": "br-int", "label": "tempest-ImagesTestJSON-224847435-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "636412f89c9d488a9cfd6f19ef046efc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"d1e1e320-ec56-4fcc-b6e9-30aa210d3b36", "external-id": "nsx-vlan-transportzone-447", "segmentation_id": 447, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7762174-47", "ovs_interfaceid": "f7762174-4741-45a8-8a0e-8c4624ad29f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 978.948049] env[69328]: DEBUG oslo_vmware.api [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]522c2ea0-16b7-7432-a00f-871065a4de77, 'name': SearchDatastore_Task, 'duration_secs': 0.011209} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.948964] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0cae5461-2601-4369-ac93-e9a5fc0921eb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.955425] env[69328]: DEBUG oslo_vmware.api [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 978.955425] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f314ba-1abf-a78b-4f7e-dfb9a23d9086" [ 978.955425] env[69328]: _type = "Task" [ 978.955425] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.964311] env[69328]: DEBUG oslo_vmware.api [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f314ba-1abf-a78b-4f7e-dfb9a23d9086, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.990324] env[69328]: INFO nova.compute.manager [-] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Took 1.28 seconds to deallocate network for instance. 
[ 979.018225] env[69328]: DEBUG nova.compute.manager [req-02285d8b-1f93-4705-86f0-c9536496290f req-05ccfcc4-e8fd-4c35-b200-58cc84bccb54 service nova] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Received event network-vif-plugged-f7762174-4741-45a8-8a0e-8c4624ad29f6 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 979.018225] env[69328]: DEBUG oslo_concurrency.lockutils [req-02285d8b-1f93-4705-86f0-c9536496290f req-05ccfcc4-e8fd-4c35-b200-58cc84bccb54 service nova] Acquiring lock "51a9c492-6f91-4186-b550-ef12284b8a84-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 979.018376] env[69328]: DEBUG oslo_concurrency.lockutils [req-02285d8b-1f93-4705-86f0-c9536496290f req-05ccfcc4-e8fd-4c35-b200-58cc84bccb54 service nova] Lock "51a9c492-6f91-4186-b550-ef12284b8a84-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 979.019437] env[69328]: DEBUG oslo_concurrency.lockutils [req-02285d8b-1f93-4705-86f0-c9536496290f req-05ccfcc4-e8fd-4c35-b200-58cc84bccb54 service nova] Lock "51a9c492-6f91-4186-b550-ef12284b8a84-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 979.019437] env[69328]: DEBUG nova.compute.manager [req-02285d8b-1f93-4705-86f0-c9536496290f req-05ccfcc4-e8fd-4c35-b200-58cc84bccb54 service nova] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] No waiting events found dispatching network-vif-plugged-f7762174-4741-45a8-8a0e-8c4624ad29f6 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 979.019437] env[69328]: WARNING nova.compute.manager [req-02285d8b-1f93-4705-86f0-c9536496290f req-05ccfcc4-e8fd-4c35-b200-58cc84bccb54 service nova] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Received unexpected event network-vif-plugged-f7762174-4741-45a8-8a0e-8c4624ad29f6 for instance with vm_state building and task_state spawning. [ 979.019437] env[69328]: DEBUG nova.compute.manager [req-02285d8b-1f93-4705-86f0-c9536496290f req-05ccfcc4-e8fd-4c35-b200-58cc84bccb54 service nova] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Received event network-changed-f7762174-4741-45a8-8a0e-8c4624ad29f6 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 979.019437] env[69328]: DEBUG nova.compute.manager [req-02285d8b-1f93-4705-86f0-c9536496290f req-05ccfcc4-e8fd-4c35-b200-58cc84bccb54 service nova] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Refreshing instance network info cache due to event network-changed-f7762174-4741-45a8-8a0e-8c4624ad29f6. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 979.020056] env[69328]: DEBUG oslo_concurrency.lockutils [req-02285d8b-1f93-4705-86f0-c9536496290f req-05ccfcc4-e8fd-4c35-b200-58cc84bccb54 service nova] Acquiring lock "refresh_cache-51a9c492-6f91-4186-b550-ef12284b8a84" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 979.113542] env[69328]: DEBUG nova.scheduler.client.report [None req-be4368cc-235b-4293-8984-4bd2a2f178dd tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 979.194151] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3243fc86-4b28-47cd-a6f4-ca28b2e48d41 tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Lock "772ab9b3-23ac-46c6-acb1-af0b2726fd90" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.184s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 979.212459] env[69328]: DEBUG nova.compute.manager [req-0dd27de6-a02f-45c0-a69f-c1a4a8d51cfc req-f1f8d660-2c8b-4121-8a8e-80ff4c37ca9c service nova] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Received event network-vif-deleted-3d148298-6666-4189-8b2b-3048f28a8bb7 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 979.280752] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273712, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.678358} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.281034] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 3ba646e8-a5c8-4917-a1c4-32b37affb598/3ba646e8-a5c8-4917-a1c4-32b37affb598.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 979.281705] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 979.281705] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-98f3e68b-e815-4cc3-afcf-c7f4662cc026 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.288609] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 979.288609] env[69328]: value = "task-3273713" [ 979.288609] env[69328]: _type = "Task" [ 979.288609] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.297412] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273713, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.402225] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Releasing lock "refresh_cache-51a9c492-6f91-4186-b550-ef12284b8a84" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 979.403030] env[69328]: DEBUG nova.compute.manager [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Instance network_info: |[{"id": "f7762174-4741-45a8-8a0e-8c4624ad29f6", "address": "fa:16:3e:15:2b:82", "network": {"id": "5b6ce910-5dc1-49bb-ad4b-fe1a86509fc3", "bridge": "br-int", "label": "tempest-ImagesTestJSON-224847435-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "636412f89c9d488a9cfd6f19ef046efc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1e1e320-ec56-4fcc-b6e9-30aa210d3b36", "external-id": "nsx-vlan-transportzone-447", "segmentation_id": 447, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7762174-47", "ovs_interfaceid": "f7762174-4741-45a8-8a0e-8c4624ad29f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 979.403030] env[69328]: DEBUG oslo_concurrency.lockutils [req-02285d8b-1f93-4705-86f0-c9536496290f req-05ccfcc4-e8fd-4c35-b200-58cc84bccb54 service nova] Acquired lock "refresh_cache-51a9c492-6f91-4186-b550-ef12284b8a84" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 979.403226] env[69328]: DEBUG nova.network.neutron [req-02285d8b-1f93-4705-86f0-c9536496290f req-05ccfcc4-e8fd-4c35-b200-58cc84bccb54 service nova] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Refreshing network info cache for port f7762174-4741-45a8-8a0e-8c4624ad29f6 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 979.404363] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:15:2b:82', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd1e1e320-ec56-4fcc-b6e9-30aa210d3b36', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f7762174-4741-45a8-8a0e-8c4624ad29f6', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 979.413149] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 979.414281] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 979.414586] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0e49f8a4-a9da-47d8-a8f9-132019f499bd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.443812] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 979.443812] env[69328]: value = "task-3273714" [ 979.443812] env[69328]: _type = "Task" [ 979.443812] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.454014] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273714, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.465658] env[69328]: DEBUG oslo_vmware.api [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f314ba-1abf-a78b-4f7e-dfb9a23d9086, 'name': SearchDatastore_Task, 'duration_secs': 0.010978} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.465658] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 979.465923] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 52c87371-4142-40d6-ac68-804aabd9f823/52c87371-4142-40d6-ac68-804aabd9f823.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 979.466051] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-41666ad9-2de4-4c2e-be2f-56b2a4ff200b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.475016] env[69328]: DEBUG oslo_vmware.api [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 979.475016] env[69328]: value = "task-3273715" [ 979.475016] env[69328]: _type = "Task" [ 979.475016] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.484790] env[69328]: DEBUG oslo_vmware.api [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273715, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.497992] env[69328]: DEBUG oslo_concurrency.lockutils [None req-dfb65d93-8fb4-40e0-8193-169d3590a0ed tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 979.528086] env[69328]: DEBUG oslo_concurrency.lockutils [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquiring lock "76210566-12d7-4f6a-afa1-6329e87e0f85" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 979.528400] env[69328]: DEBUG oslo_concurrency.lockutils [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lock "76210566-12d7-4f6a-afa1-6329e87e0f85" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 979.528593] env[69328]: INFO nova.compute.manager [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Shelving [ 979.619420] env[69328]: DEBUG oslo_concurrency.lockutils [None req-be4368cc-235b-4293-8984-4bd2a2f178dd tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.009s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 979.622616] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 12.415s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 979.632833] env[69328]: DEBUG nova.compute.manager [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 979.651244] env[69328]: INFO nova.scheduler.client.report [None req-be4368cc-235b-4293-8984-4bd2a2f178dd tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Deleted allocations for instance 9ad2b2e3-460a-403e-bfc7-f46648c93849 [ 979.667860] env[69328]: DEBUG nova.virt.hardware [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 979.668121] env[69328]: DEBUG nova.virt.hardware [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 979.668284] env[69328]: DEBUG nova.virt.hardware [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 979.668486] env[69328]: DEBUG nova.virt.hardware [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 979.668658] env[69328]: DEBUG nova.virt.hardware [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 979.668820] env[69328]: DEBUG nova.virt.hardware [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 979.669045] env[69328]: DEBUG nova.virt.hardware [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 979.669236] env[69328]: DEBUG nova.virt.hardware [None req-e3e18147-23ab-49ab-8496-4c109e41e077 
tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 979.669416] env[69328]: DEBUG nova.virt.hardware [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 979.669617] env[69328]: DEBUG nova.virt.hardware [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 979.669856] env[69328]: DEBUG nova.virt.hardware [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 979.671145] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55c514f4-16e6-43a7-bd58-63c0e2211913 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.680361] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eb28ade-16b6-4386-be9c-ba380d21619a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.801749] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273713, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090066} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.807130] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 979.807130] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bda132d-96e5-417c-a24a-3e231972813a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.825388] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 979.840197] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Reconfiguring VM instance instance-00000046 to attach disk [datastore2] 3ba646e8-a5c8-4917-a1c4-32b37affb598/3ba646e8-a5c8-4917-a1c4-32b37affb598.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 979.841383] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3684cc30-4feb-41f1-b2c9-6d3becd073db {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.844884] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-65a028f8-af0c-4656-a491-987f7d2b43bb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.870665] env[69328]: DEBUG oslo_vmware.api [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for the task: (returnval){ [ 979.870665] env[69328]: value = "task-3273716" [ 979.870665] env[69328]: _type = "Task" [ 979.870665] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.875320] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 979.875320] env[69328]: value = "task-3273717" [ 979.875320] env[69328]: _type = "Task" [ 979.875320] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.883028] env[69328]: DEBUG oslo_vmware.api [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273716, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.897301] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273717, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.952902] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273714, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.987117] env[69328]: DEBUG oslo_vmware.api [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273715, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.497696} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.987395] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 52c87371-4142-40d6-ac68-804aabd9f823/52c87371-4142-40d6-ac68-804aabd9f823.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 979.987766] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 979.988626] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3c036fe0-431e-4f99-84c0-e02855402f8b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.999243] env[69328]: DEBUG oslo_vmware.api [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 979.999243] env[69328]: value = "task-3273718" [ 979.999243] env[69328]: _type = "Task" [ 979.999243] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.010561] env[69328]: DEBUG oslo_vmware.api [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273718, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.128685] env[69328]: INFO nova.compute.claims [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 980.162267] env[69328]: DEBUG oslo_concurrency.lockutils [None req-be4368cc-235b-4293-8984-4bd2a2f178dd tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "9ad2b2e3-460a-403e-bfc7-f46648c93849" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.702s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 980.204243] env[69328]: DEBUG nova.network.neutron [req-02285d8b-1f93-4705-86f0-c9536496290f req-05ccfcc4-e8fd-4c35-b200-58cc84bccb54 service nova] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Updated VIF entry in instance network info cache for port f7762174-4741-45a8-8a0e-8c4624ad29f6. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 980.204677] env[69328]: DEBUG nova.network.neutron [req-02285d8b-1f93-4705-86f0-c9536496290f req-05ccfcc4-e8fd-4c35-b200-58cc84bccb54 service nova] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Updating instance_info_cache with network_info: [{"id": "f7762174-4741-45a8-8a0e-8c4624ad29f6", "address": "fa:16:3e:15:2b:82", "network": {"id": "5b6ce910-5dc1-49bb-ad4b-fe1a86509fc3", "bridge": "br-int", "label": "tempest-ImagesTestJSON-224847435-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "636412f89c9d488a9cfd6f19ef046efc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1e1e320-ec56-4fcc-b6e9-30aa210d3b36", "external-id": "nsx-vlan-transportzone-447", "segmentation_id": 447, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7762174-47", "ovs_interfaceid": "f7762174-4741-45a8-8a0e-8c4624ad29f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 980.243180] env[69328]: DEBUG nova.network.neutron [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Successfully updated port: 3d319ee9-4b9f-43cd-b96e-f3b35e34ec76 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 980.383690] env[69328]: DEBUG oslo_vmware.api [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273716, 'name': PowerOffVM_Task, 'duration_secs': 0.275624} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.384408] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 980.385321] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffb65543-a3ca-43b6-9ea0-6c575a920214 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.393726] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273717, 'name': ReconfigVM_Task, 'duration_secs': 0.33692} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.394363] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Reconfigured VM instance instance-00000046 to attach disk [datastore2] 3ba646e8-a5c8-4917-a1c4-32b37affb598/3ba646e8-a5c8-4917-a1c4-32b37affb598.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 980.396335] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'boot_index': 0, 'encryption_secret_uuid': None, 'disk_bus': None, 'device_name': '/dev/sda', 'encrypted': False, 'encryption_options': None, 'size': 0, 'encryption_format': None, 'guest_format': None, 'device_type': 'disk', 'image_id': 'a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318'}], 'ephemerals': [], 'block_device_mapping': [{'mount_device': '/dev/sdb', 'boot_index': None, 'delete_on_termination': False, 'disk_bus': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653876', 'volume_id': 'a39fd325-7c9d-4482-b7a4-43b28bf52e5c', 'name': 'volume-a39fd325-7c9d-4482-b7a4-43b28bf52e5c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3ba646e8-a5c8-4917-a1c4-32b37affb598', 'attached_at': '', 'detached_at': '', 'volume_id': 'a39fd325-7c9d-4482-b7a4-43b28bf52e5c', 'serial': 'a39fd325-7c9d-4482-b7a4-43b28bf52e5c'}, 'guest_format': None, 'device_type': None, 'attachment_id': '8325a068-f5bd-4258-b353-fcb69a59645d', 'volume_type': None}], 'swap': None} {{(pid=69328) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 980.396595] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Volume attach. 
Driver type: vmdk {{(pid=69328) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 980.396756] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653876', 'volume_id': 'a39fd325-7c9d-4482-b7a4-43b28bf52e5c', 'name': 'volume-a39fd325-7c9d-4482-b7a4-43b28bf52e5c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3ba646e8-a5c8-4917-a1c4-32b37affb598', 'attached_at': '', 'detached_at': '', 'volume_id': 'a39fd325-7c9d-4482-b7a4-43b28bf52e5c', 'serial': 'a39fd325-7c9d-4482-b7a4-43b28bf52e5c'} {{(pid=69328) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 980.412397] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f3d2c5f-d21f-4b83-8d36-d6341e547bdb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.418972] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3498ab1f-d348-4349-9d73-570003e5464c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.449519] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07537b45-535f-4d67-9f14-29021f3631ca {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.458045] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273714, 'name': CreateVM_Task, 'duration_secs': 0.618044} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.471407] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 980.479841] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Reconfiguring VM instance instance-00000046 to attach disk [datastore2] volume-a39fd325-7c9d-4482-b7a4-43b28bf52e5c/volume-a39fd325-7c9d-4482-b7a4-43b28bf52e5c.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 980.480612] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/44b9c7fb-3d5c-4708-a4ae-a8a2aba3a7ca" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.480775] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquired lock "[datastore2] devstack-image-cache_base/44b9c7fb-3d5c-4708-a4ae-a8a2aba3a7ca" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 980.481209] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/44b9c7fb-3d5c-4708-a4ae-a8a2aba3a7ca" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 980.481758] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3574a1f4-2314-4d4d-b170-0b59e0b6386b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.495804] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0c7a447-4aed-440f-9cf2-79b009327db6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.501303] env[69328]: DEBUG oslo_vmware.api [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 980.501303] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5268ceeb-e29e-e1c0-53d2-2c9daba8980e" [ 980.501303] env[69328]: _type = "Task" [ 980.501303] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.508633] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 980.508633] env[69328]: value = "task-3273719" [ 980.508633] env[69328]: _type = "Task" [ 980.508633] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.517934] env[69328]: DEBUG oslo_vmware.api [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273718, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.124523} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.518160] env[69328]: DEBUG oslo_vmware.api [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5268ceeb-e29e-e1c0-53d2-2c9daba8980e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.518764] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 980.519616] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c754f9f3-a882-42b3-86f7-eff2ab5039e4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.524906] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273719, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.545132] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] 52c87371-4142-40d6-ac68-804aabd9f823/52c87371-4142-40d6-ac68-804aabd9f823.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 980.545716] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 980.545932] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ff0c8286-9687-49ba-bb38-2ed68a3707f4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.562140] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4112a5a7-b683-4d4a-97ee-481e757060b7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.571789] env[69328]: DEBUG oslo_vmware.api [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 980.571789] env[69328]: value = "task-3273720" [ 980.571789] env[69328]: _type = "Task" [ 980.571789] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.572084] env[69328]: DEBUG oslo_vmware.api [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 980.572084] env[69328]: value = "task-3273721" [ 980.572084] env[69328]: _type = "Task" [ 980.572084] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.585157] env[69328]: DEBUG oslo_vmware.api [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273721, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.588220] env[69328]: DEBUG oslo_vmware.api [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273720, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.635986] env[69328]: INFO nova.compute.resource_tracker [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Updating resource usage from migration d9a4d9ef-a86e-49ef-841e-1e4093b2e6d7 [ 980.708179] env[69328]: DEBUG oslo_concurrency.lockutils [req-02285d8b-1f93-4705-86f0-c9536496290f req-05ccfcc4-e8fd-4c35-b200-58cc84bccb54 service nova] Releasing lock "refresh_cache-51a9c492-6f91-4186-b550-ef12284b8a84" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 980.747308] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "refresh_cache-b61436f5-0e8b-4da5-9459-cf9487dfd23f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.747581] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquired lock "refresh_cache-b61436f5-0e8b-4da5-9459-cf9487dfd23f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 980.747845] env[69328]: DEBUG nova.network.neutron [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 980.919510] env[69328]: DEBUG oslo_concurrency.lockutils [None req-65bf4397-837d-43f7-abf5-96e2a73bb4ae tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Acquiring lock "772ab9b3-23ac-46c6-acb1-af0b2726fd90" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 980.919799] env[69328]: DEBUG oslo_concurrency.lockutils [None req-65bf4397-837d-43f7-abf5-96e2a73bb4ae tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Lock "772ab9b3-23ac-46c6-acb1-af0b2726fd90" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 980.919974] env[69328]: DEBUG oslo_concurrency.lockutils [None req-65bf4397-837d-43f7-abf5-96e2a73bb4ae tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Acquiring lock "772ab9b3-23ac-46c6-acb1-af0b2726fd90-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 980.920258] env[69328]: DEBUG oslo_concurrency.lockutils [None req-65bf4397-837d-43f7-abf5-96e2a73bb4ae tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Lock "772ab9b3-23ac-46c6-acb1-af0b2726fd90-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 980.920442] env[69328]: DEBUG oslo_concurrency.lockutils [None req-65bf4397-837d-43f7-abf5-96e2a73bb4ae tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Lock "772ab9b3-23ac-46c6-acb1-af0b2726fd90-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 980.922669] env[69328]: INFO nova.compute.manager [None req-65bf4397-837d-43f7-abf5-96e2a73bb4ae tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Terminating instance [ 980.947527] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Creating Snapshot of the VM instance {{(pid=69328) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 980.947786] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-0afdf057-5915-4400-bf9f-5b2db7263cee {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.956912] env[69328]: DEBUG oslo_vmware.api [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for the task: (returnval){ [ 980.956912] env[69328]: value = "task-3273722" [ 980.956912] env[69328]: _type = "Task" [ 980.956912] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.970644] env[69328]: DEBUG oslo_vmware.api [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273722, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.979656] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58db7dba-6337-47da-a4ba-37a292ec3532 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.987394] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88dcf6d7-7f4a-4997-8d6a-fbf089663fb2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.023908] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98f82f0d-58af-49e1-8ea9-d43541ec0bb9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.033568] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273719, 'name': ReconfigVM_Task, 'duration_secs': 0.328942} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.036772] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Reconfigured VM instance instance-00000046 to attach disk [datastore2] volume-a39fd325-7c9d-4482-b7a4-43b28bf52e5c/volume-a39fd325-7c9d-4482-b7a4-43b28bf52e5c.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 981.042215] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df1fe6f8-37c3-41ca-98d6-963ba2c79a20 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.046023] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2e1df704-747d-4bcc-979c-1cef7a649628 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.055801] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Releasing lock "[datastore2] devstack-image-cache_base/44b9c7fb-3d5c-4708-a4ae-a8a2aba3a7ca" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 981.056053] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Processing image 44b9c7fb-3d5c-4708-a4ae-a8a2aba3a7ca {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 981.056302] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/44b9c7fb-3d5c-4708-a4ae-a8a2aba3a7ca/44b9c7fb-3d5c-4708-a4ae-a8a2aba3a7ca.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.056445] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquired lock "[datastore2] devstack-image-cache_base/44b9c7fb-3d5c-4708-a4ae-a8a2aba3a7ca/44b9c7fb-3d5c-4708-a4ae-a8a2aba3a7ca.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 981.056616] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 981.057246] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1d0f73b6-438f-44d4-b4ed-15252434401e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.069567] env[69328]: DEBUG nova.compute.provider_tree [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 
tempest-ServerDiskConfigTestJSON-214284543-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 981.072888] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 981.072888] env[69328]: value = "task-3273723" [ 981.072888] env[69328]: _type = "Task" [ 981.072888] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.073423] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 981.073423] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 981.079783] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed3ddb04-8ae3-4ca2-9684-3b61e2a8906f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.095636] env[69328]: DEBUG oslo_vmware.api [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 981.095636] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]528e9a25-580e-c780-5b15-dc9b39c6f081" [ 981.095636] env[69328]: _type = "Task" [ 981.095636] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.096215] env[69328]: DEBUG oslo_vmware.api [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273720, 'name': PowerOffVM_Task, 'duration_secs': 0.216563} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.096428] env[69328]: DEBUG oslo_vmware.api [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273721, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.099991] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 981.100140] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273723, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.103890] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1340573f-2677-41c9-a098-6a17c73ee34f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.129158] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8965f248-5445-4d79-b01d-4e6b1186bfe7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.131238] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Preparing fetch location {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 981.131462] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Fetch image to [datastore2] OSTACK_IMG_3be16e45-d79e-4346-9dee-4e0c5b2d2252/OSTACK_IMG_3be16e45-d79e-4346-9dee-4e0c5b2d2252.vmdk {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 981.131641] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Downloading stream optimized image 44b9c7fb-3d5c-4708-a4ae-a8a2aba3a7ca to [datastore2] OSTACK_IMG_3be16e45-d79e-4346-9dee-4e0c5b2d2252/OSTACK_IMG_3be16e45-d79e-4346-9dee-4e0c5b2d2252.vmdk on the data store datastore2 as vApp {{(pid=69328) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 981.131802] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Downloading image file data 44b9c7fb-3d5c-4708-a4ae-a8a2aba3a7ca to the ESX as VM named 'OSTACK_IMG_3be16e45-d79e-4346-9dee-4e0c5b2d2252' {{(pid=69328) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 981.223640] env[69328]: DEBUG oslo_vmware.rw_handles [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 981.223640] env[69328]: value = "resgroup-9" [ 981.223640] env[69328]: _type = 
"ResourcePool" [ 981.223640] env[69328]: }. {{(pid=69328) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 981.224102] env[69328]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-7cd3280d-0a0d-470e-a395-5d4eb5b9c1e7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.242769] env[69328]: DEBUG nova.compute.manager [req-bb17828b-85d6-4c8b-b2cf-e85e869ddff0 req-fedb8a5a-9897-4e6f-a326-d020a30ec84c service nova] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Received event network-vif-plugged-3d319ee9-4b9f-43cd-b96e-f3b35e34ec76 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 981.242769] env[69328]: DEBUG oslo_concurrency.lockutils [req-bb17828b-85d6-4c8b-b2cf-e85e869ddff0 req-fedb8a5a-9897-4e6f-a326-d020a30ec84c service nova] Acquiring lock "b61436f5-0e8b-4da5-9459-cf9487dfd23f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 981.243372] env[69328]: DEBUG oslo_concurrency.lockutils [req-bb17828b-85d6-4c8b-b2cf-e85e869ddff0 req-fedb8a5a-9897-4e6f-a326-d020a30ec84c service nova] Lock "b61436f5-0e8b-4da5-9459-cf9487dfd23f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 981.243372] env[69328]: DEBUG oslo_concurrency.lockutils [req-bb17828b-85d6-4c8b-b2cf-e85e869ddff0 req-fedb8a5a-9897-4e6f-a326-d020a30ec84c service nova] Lock "b61436f5-0e8b-4da5-9459-cf9487dfd23f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 981.243372] env[69328]: DEBUG nova.compute.manager [req-bb17828b-85d6-4c8b-b2cf-e85e869ddff0 req-fedb8a5a-9897-4e6f-a326-d020a30ec84c service nova] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] No waiting events found dispatching network-vif-plugged-3d319ee9-4b9f-43cd-b96e-f3b35e34ec76 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 981.243372] env[69328]: WARNING nova.compute.manager [req-bb17828b-85d6-4c8b-b2cf-e85e869ddff0 req-fedb8a5a-9897-4e6f-a326-d020a30ec84c service nova] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Received unexpected event network-vif-plugged-3d319ee9-4b9f-43cd-b96e-f3b35e34ec76 for instance with vm_state building and task_state spawning. [ 981.243561] env[69328]: DEBUG nova.compute.manager [req-bb17828b-85d6-4c8b-b2cf-e85e869ddff0 req-fedb8a5a-9897-4e6f-a326-d020a30ec84c service nova] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Received event network-changed-3d319ee9-4b9f-43cd-b96e-f3b35e34ec76 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 981.243922] env[69328]: DEBUG nova.compute.manager [req-bb17828b-85d6-4c8b-b2cf-e85e869ddff0 req-fedb8a5a-9897-4e6f-a326-d020a30ec84c service nova] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Refreshing instance network info cache due to event network-changed-3d319ee9-4b9f-43cd-b96e-f3b35e34ec76. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 981.243922] env[69328]: DEBUG oslo_concurrency.lockutils [req-bb17828b-85d6-4c8b-b2cf-e85e869ddff0 req-fedb8a5a-9897-4e6f-a326-d020a30ec84c service nova] Acquiring lock "refresh_cache-b61436f5-0e8b-4da5-9459-cf9487dfd23f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.247752] env[69328]: DEBUG oslo_vmware.rw_handles [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lease: (returnval){ [ 981.247752] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5202e3a2-6c26-3de0-9a41-718adfc7faa9" [ 981.247752] env[69328]: _type = "HttpNfcLease" [ 981.247752] env[69328]: } obtained for vApp import into resource pool (val){ [ 981.247752] env[69328]: value = "resgroup-9" [ 981.247752] env[69328]: _type = "ResourcePool" [ 981.247752] env[69328]: }. {{(pid=69328) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 981.248058] env[69328]: DEBUG oslo_vmware.api [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the lease: (returnval){ [ 981.248058] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5202e3a2-6c26-3de0-9a41-718adfc7faa9" [ 981.248058] env[69328]: _type = "HttpNfcLease" [ 981.248058] env[69328]: } to be ready. {{(pid=69328) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 981.260766] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "a7d4893f-31d4-449d-96d5-a2a1377d8454" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 981.261020] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "a7d4893f-31d4-449d-96d5-a2a1377d8454" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 981.262167] env[69328]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 981.262167] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5202e3a2-6c26-3de0-9a41-718adfc7faa9" [ 981.262167] env[69328]: _type = "HttpNfcLease" [ 981.262167] env[69328]: } is initializing. {{(pid=69328) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 981.289227] env[69328]: DEBUG nova.network.neutron [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 981.429371] env[69328]: DEBUG nova.compute.manager [None req-65bf4397-837d-43f7-abf5-96e2a73bb4ae tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 981.429674] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-65bf4397-837d-43f7-abf5-96e2a73bb4ae tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 981.430721] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aab89538-80bd-4a0b-966a-957715408d82 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.441443] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-65bf4397-837d-43f7-abf5-96e2a73bb4ae tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 981.441751] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6732615c-baad-4b6c-8e4d-2af2904bd789 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.449814] env[69328]: DEBUG oslo_vmware.api [None req-65bf4397-837d-43f7-abf5-96e2a73bb4ae tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Waiting for the task: (returnval){ [ 981.449814] env[69328]: value = "task-3273725" [ 981.449814] env[69328]: _type = "Task" [ 981.449814] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.457709] env[69328]: DEBUG nova.network.neutron [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Updating instance_info_cache with network_info: [{"id": "3d319ee9-4b9f-43cd-b96e-f3b35e34ec76", "address": "fa:16:3e:c5:4d:7f", "network": {"id": "620f277d-ca49-41da-83a0-afb8c393e26e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1223326239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdc479a290524130b9d17e627a64b65a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d319ee9-4b", "ovs_interfaceid": "3d319ee9-4b9f-43cd-b96e-f3b35e34ec76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 981.469613] env[69328]: DEBUG oslo_vmware.api [None req-65bf4397-837d-43f7-abf5-96e2a73bb4ae tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Task: {'id': task-3273725, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.478196] env[69328]: DEBUG oslo_vmware.api [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273722, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.579273] env[69328]: DEBUG nova.scheduler.client.report [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 981.596727] env[69328]: DEBUG oslo_vmware.api [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273721, 'name': ReconfigVM_Task, 'duration_secs': 0.69882} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.600922] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Reconfigured VM instance instance-00000056 to attach disk [datastore1] 52c87371-4142-40d6-ac68-804aabd9f823/52c87371-4142-40d6-ac68-804aabd9f823.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 981.601710] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273723, 'name': ReconfigVM_Task, 'duration_secs': 0.186767} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.602669] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ffead65c-6c60-440d-a748-41c26c6e8767 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.604532] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653876', 'volume_id': 'a39fd325-7c9d-4482-b7a4-43b28bf52e5c', 'name': 'volume-a39fd325-7c9d-4482-b7a4-43b28bf52e5c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3ba646e8-a5c8-4917-a1c4-32b37affb598', 'attached_at': '', 'detached_at': '', 'volume_id': 'a39fd325-7c9d-4482-b7a4-43b28bf52e5c', 'serial': 'a39fd325-7c9d-4482-b7a4-43b28bf52e5c'} {{(pid=69328) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 981.605393] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c0b7ccb6-cc6c-4253-a2bd-20eb24dbe297 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.615025] env[69328]: DEBUG oslo_vmware.api [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 981.615025] env[69328]: value = "task-3273727" [ 981.615025] env[69328]: _type = "Task" [ 981.615025] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.616567] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 981.616567] env[69328]: value = "task-3273726" [ 981.616567] env[69328]: _type = "Task" [ 981.616567] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.634013] env[69328]: DEBUG oslo_vmware.api [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273727, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.634368] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273726, 'name': Rename_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.643969] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Creating Snapshot of the VM instance {{(pid=69328) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 981.644370] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-5c586c8b-d878-44ce-8f93-72653e7b5606 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.666447] env[69328]: DEBUG oslo_vmware.api [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 981.666447] env[69328]: value = "task-3273728" [ 981.666447] env[69328]: _type = "Task" [ 981.666447] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.679723] env[69328]: DEBUG oslo_vmware.api [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273728, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.757190] env[69328]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 981.757190] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5202e3a2-6c26-3de0-9a41-718adfc7faa9" [ 981.757190] env[69328]: _type = "HttpNfcLease" [ 981.757190] env[69328]: } is ready. {{(pid=69328) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 981.757504] env[69328]: DEBUG oslo_vmware.rw_handles [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 981.757504] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5202e3a2-6c26-3de0-9a41-718adfc7faa9" [ 981.757504] env[69328]: _type = "HttpNfcLease" [ 981.757504] env[69328]: }. 
{{(pid=69328) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 981.758286] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93a7c066-f563-4a0b-911a-f6dd213da310 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.767702] env[69328]: DEBUG nova.compute.manager [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 981.771324] env[69328]: DEBUG oslo_vmware.rw_handles [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527f35e1-a8f5-39b7-4bef-4fea7545fba2/disk-0.vmdk from lease info. {{(pid=69328) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 981.771731] env[69328]: DEBUG oslo_vmware.rw_handles [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527f35e1-a8f5-39b7-4bef-4fea7545fba2/disk-0.vmdk. {{(pid=69328) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 981.839462] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d44e2f3a-5edf-4fc8-81e2-702f31fb4740 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.862242] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 981.862499] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 981.961044] env[69328]: DEBUG oslo_vmware.api [None req-65bf4397-837d-43f7-abf5-96e2a73bb4ae tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Task: {'id': task-3273725, 'name': PowerOffVM_Task, 'duration_secs': 0.214139} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.964423] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-65bf4397-837d-43f7-abf5-96e2a73bb4ae tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 981.964651] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-65bf4397-837d-43f7-abf5-96e2a73bb4ae tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 981.965534] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Releasing lock "refresh_cache-b61436f5-0e8b-4da5-9459-cf9487dfd23f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 981.965872] env[69328]: DEBUG nova.compute.manager [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Instance network_info: |[{"id": "3d319ee9-4b9f-43cd-b96e-f3b35e34ec76", "address": "fa:16:3e:c5:4d:7f", "network": {"id": "620f277d-ca49-41da-83a0-afb8c393e26e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1223326239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdc479a290524130b9d17e627a64b65a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d319ee9-4b", "ovs_interfaceid": "3d319ee9-4b9f-43cd-b96e-f3b35e34ec76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 981.966159] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9605dddb-51dc-4586-b950-5bb24d9420c9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.968405] env[69328]: DEBUG oslo_concurrency.lockutils [req-bb17828b-85d6-4c8b-b2cf-e85e869ddff0 req-fedb8a5a-9897-4e6f-a326-d020a30ec84c service nova] Acquired lock "refresh_cache-b61436f5-0e8b-4da5-9459-cf9487dfd23f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 981.968630] env[69328]: DEBUG nova.network.neutron [req-bb17828b-85d6-4c8b-b2cf-e85e869ddff0 req-fedb8a5a-9897-4e6f-a326-d020a30ec84c service nova] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Refreshing network info cache for port 
3d319ee9-4b9f-43cd-b96e-f3b35e34ec76 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 981.970085] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c5:4d:7f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '86a35d07-53d3-46b3-92cb-ae34236c0f41', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3d319ee9-4b9f-43cd-b96e-f3b35e34ec76', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 981.978629] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 981.983289] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 981.984058] env[69328]: DEBUG oslo_vmware.api [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273722, 'name': CreateSnapshot_Task, 'duration_secs': 0.858211} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.984323] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f9d263b8-9f63-4350-bdd1-c0d99dd7207d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.001435] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Created Snapshot of the VM instance {{(pid=69328) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 982.006049] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0fb9de5-e0f7-45dd-abb6-f804627f0e31 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.025321] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 982.025321] env[69328]: value = "task-3273730" [ 982.025321] env[69328]: _type = "Task" [ 982.025321] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.042886] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273730, 'name': CreateVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.055273] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-65bf4397-837d-43f7-abf5-96e2a73bb4ae tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 982.055630] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-65bf4397-837d-43f7-abf5-96e2a73bb4ae tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 982.055861] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-65bf4397-837d-43f7-abf5-96e2a73bb4ae tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Deleting the datastore file [datastore1] 772ab9b3-23ac-46c6-acb1-af0b2726fd90 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 982.056253] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dad4c350-6c06-4b92-8542-d1f8859a75d2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.070546] env[69328]: DEBUG oslo_vmware.api [None req-65bf4397-837d-43f7-abf5-96e2a73bb4ae tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Waiting for the task: (returnval){ [ 982.070546] env[69328]: value = "task-3273731" [ 982.070546] env[69328]: _type = "Task" [ 982.070546] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.083050] env[69328]: DEBUG oslo_vmware.api [None req-65bf4397-837d-43f7-abf5-96e2a73bb4ae tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Task: {'id': task-3273731, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.091478] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.469s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 982.091742] env[69328]: INFO nova.compute.manager [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Migrating [ 982.099951] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.167s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 982.101521] env[69328]: INFO nova.compute.claims [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 982.132744] env[69328]: DEBUG oslo_vmware.api [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273727, 'name': Rename_Task, 'duration_secs': 0.207739} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.137424] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 982.138087] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273726, 'name': Rename_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.139186] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e27d6bdf-d4e2-47c6-9aea-5859e039a346 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.147729] env[69328]: DEBUG oslo_vmware.api [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 982.147729] env[69328]: value = "task-3273732" [ 982.147729] env[69328]: _type = "Task" [ 982.147729] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.159475] env[69328]: DEBUG oslo_vmware.api [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273732, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.177750] env[69328]: DEBUG oslo_vmware.api [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273728, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.295980] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 982.372235] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 982.372297] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 982.372571] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 982.372692] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 982.372791] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 982.373140] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 982.373140] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69328) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 982.373281] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager.update_available_resource {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 982.535959] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Creating linked-clone VM from snapshot {{(pid=69328) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 982.538468] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-89c9a754-607b-4fff-86ba-c9fdfbd6e671 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.552133] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273730, 'name': CreateVM_Task, 'duration_secs': 0.414922} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.555114] env[69328]: DEBUG oslo_vmware.rw_handles [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Completed reading data from the image iterator. {{(pid=69328) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 982.555245] env[69328]: DEBUG oslo_vmware.rw_handles [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527f35e1-a8f5-39b7-4bef-4fea7545fba2/disk-0.vmdk. {{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 982.555502] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 982.555881] env[69328]: DEBUG oslo_vmware.api [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for the task: (returnval){ [ 982.555881] env[69328]: value = "task-3273733" [ 982.555881] env[69328]: _type = "Task" [ 982.555881] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.556804] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed0a57f6-d78a-4c25-844d-44ee8d7ecbc9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.560286] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.560448] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 982.560822] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 982.561208] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e851c27-3336-4645-b57c-4bf859c8c221 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.574813] env[69328]: DEBUG oslo_vmware.api [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 982.574813] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]528351f0-0b7e-24af-8142-f2594cecb5ac" [ 982.574813] env[69328]: _type = "Task" [ 982.574813] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.575216] env[69328]: DEBUG oslo_vmware.api [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273733, 'name': CloneVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.579708] env[69328]: DEBUG oslo_vmware.rw_handles [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527f35e1-a8f5-39b7-4bef-4fea7545fba2/disk-0.vmdk is in state: ready. {{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 982.579899] env[69328]: DEBUG oslo_vmware.rw_handles [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527f35e1-a8f5-39b7-4bef-4fea7545fba2/disk-0.vmdk. 
{{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 982.584012] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-a34e887e-3f08-40e2-b83a-cebaa0e97651 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.595304] env[69328]: DEBUG oslo_vmware.api [None req-65bf4397-837d-43f7-abf5-96e2a73bb4ae tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Task: {'id': task-3273731, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142911} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.599434] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-65bf4397-837d-43f7-abf5-96e2a73bb4ae tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 982.599696] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-65bf4397-837d-43f7-abf5-96e2a73bb4ae tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 982.599898] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-65bf4397-837d-43f7-abf5-96e2a73bb4ae tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 982.600112] env[69328]: INFO nova.compute.manager [None req-65bf4397-837d-43f7-abf5-96e2a73bb4ae tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Took 1.17 seconds to destroy the instance on the hypervisor. [ 982.600398] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-65bf4397-837d-43f7-abf5-96e2a73bb4ae tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 982.600653] env[69328]: DEBUG oslo_vmware.api [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]528351f0-0b7e-24af-8142-f2594cecb5ac, 'name': SearchDatastore_Task, 'duration_secs': 0.011922} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.600879] env[69328]: DEBUG nova.compute.manager [-] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 982.600979] env[69328]: DEBUG nova.network.neutron [-] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 982.603040] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 982.603297] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 982.603564] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.603741] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 982.603948] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 982.604283] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-680f4a88-df44-4f9a-98b7-33e54062c3ed {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.615592] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "refresh_cache-96f604a9-e42c-4aa8-b5b5-edcb34901d94" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.615790] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquired lock "refresh_cache-96f604a9-e42c-4aa8-b5b5-edcb34901d94" {{(pid=69328) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 982.616041] env[69328]: DEBUG nova.network.neutron [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 982.624765] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 982.624765] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 982.628916] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc5e0910-fb4f-453c-8f6c-1fa03f3189cb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.636042] env[69328]: DEBUG oslo_vmware.api [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 982.636042] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52adfaa2-da49-748b-7917-f24c7eea57a8" [ 982.636042] env[69328]: _type = "Task" [ 982.636042] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.640457] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273726, 'name': Rename_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.667260] env[69328]: DEBUG oslo_vmware.api [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52adfaa2-da49-748b-7917-f24c7eea57a8, 'name': SearchDatastore_Task, 'duration_secs': 0.025858} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.670257] env[69328]: DEBUG oslo_vmware.api [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3273732, 'name': PowerOnVM_Task, 'duration_secs': 0.517353} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.672805] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-448df8db-8531-4e0b-9028-297a660d4337 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.679068] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 982.679416] env[69328]: INFO nova.compute.manager [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Took 8.99 seconds to spawn the instance on the hypervisor. [ 982.679637] env[69328]: DEBUG nova.compute.manager [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 982.684047] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3edbb93-dc88-4651-8624-f34969888788 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.690476] env[69328]: DEBUG oslo_vmware.api [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 982.690476] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52681f51-dc6c-3054-cfe4-7eb0df109157" [ 982.690476] env[69328]: _type = "Task" [ 982.690476] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.699331] env[69328]: DEBUG oslo_vmware.api [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273728, 'name': CreateSnapshot_Task, 'duration_secs': 0.549113} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.702814] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Created Snapshot of the VM instance {{(pid=69328) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 982.703592] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e864e70e-5e20-48dd-9c39-3e98a88447ef {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.712213] env[69328]: DEBUG oslo_vmware.api [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52681f51-dc6c-3054-cfe4-7eb0df109157, 'name': SearchDatastore_Task, 'duration_secs': 0.011292} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.715787] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 982.716103] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] b61436f5-0e8b-4da5-9459-cf9487dfd23f/b61436f5-0e8b-4da5-9459-cf9487dfd23f.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 982.719128] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9de3519b-7e5b-4fa6-9697-ab869618b778 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.729898] env[69328]: DEBUG oslo_vmware.api [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 982.729898] env[69328]: value = "task-3273734" [ 982.729898] env[69328]: _type = "Task" [ 982.729898] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.740036] env[69328]: DEBUG oslo_vmware.api [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273734, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.761265] env[69328]: DEBUG nova.network.neutron [req-bb17828b-85d6-4c8b-b2cf-e85e869ddff0 req-fedb8a5a-9897-4e6f-a326-d020a30ec84c service nova] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Updated VIF entry in instance network info cache for port 3d319ee9-4b9f-43cd-b96e-f3b35e34ec76. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 982.761265] env[69328]: DEBUG nova.network.neutron [req-bb17828b-85d6-4c8b-b2cf-e85e869ddff0 req-fedb8a5a-9897-4e6f-a326-d020a30ec84c service nova] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Updating instance_info_cache with network_info: [{"id": "3d319ee9-4b9f-43cd-b96e-f3b35e34ec76", "address": "fa:16:3e:c5:4d:7f", "network": {"id": "620f277d-ca49-41da-83a0-afb8c393e26e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1223326239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdc479a290524130b9d17e627a64b65a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d319ee9-4b", "ovs_interfaceid": "3d319ee9-4b9f-43cd-b96e-f3b35e34ec76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 982.815892] env[69328]: DEBUG oslo_vmware.rw_handles [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527f35e1-a8f5-39b7-4bef-4fea7545fba2/disk-0.vmdk. 
{{(pid=69328) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 982.817965] env[69328]: INFO nova.virt.vmwareapi.images [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Downloaded image file data 44b9c7fb-3d5c-4708-a4ae-a8a2aba3a7ca [ 982.817965] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b340de47-63d7-4dd8-bc73-12d913b78933 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.834975] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-60e27dcf-3788-438a-9498-40f3473a6f18 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.866666] env[69328]: INFO nova.virt.vmwareapi.images [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] The imported VM was unregistered [ 982.868874] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Caching image {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 982.869214] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Creating directory with path [datastore2] devstack-image-cache_base/44b9c7fb-3d5c-4708-a4ae-a8a2aba3a7ca {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 982.869456] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ee740ca8-ecd4-4721-b03d-d12ca93a94b1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.876162] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 982.883255] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Created directory with path [datastore2] devstack-image-cache_base/44b9c7fb-3d5c-4708-a4ae-a8a2aba3a7ca {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 982.883539] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_3be16e45-d79e-4346-9dee-4e0c5b2d2252/OSTACK_IMG_3be16e45-d79e-4346-9dee-4e0c5b2d2252.vmdk to [datastore2] devstack-image-cache_base/44b9c7fb-3d5c-4708-a4ae-a8a2aba3a7ca/44b9c7fb-3d5c-4708-a4ae-a8a2aba3a7ca.vmdk. 
{{(pid=69328) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 982.883833] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-3a945eb4-dfe7-4fce-85c7-af4110120023 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.894438] env[69328]: DEBUG oslo_vmware.api [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 982.894438] env[69328]: value = "task-3273736" [ 982.894438] env[69328]: _type = "Task" [ 982.894438] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.904415] env[69328]: DEBUG oslo_vmware.api [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273736, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.009217] env[69328]: DEBUG nova.compute.manager [req-26f8a8f9-343f-4e9d-8215-2a4662d9487d req-17522ea7-3204-4486-93c7-2b110d6aad6e service nova] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Received event network-vif-deleted-f7b2aa6f-1dee-4050-84ad-2ea0e567211b {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 983.009484] env[69328]: INFO nova.compute.manager [req-26f8a8f9-343f-4e9d-8215-2a4662d9487d req-17522ea7-3204-4486-93c7-2b110d6aad6e service nova] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Neutron deleted interface f7b2aa6f-1dee-4050-84ad-2ea0e567211b; detaching it from the instance and deleting it from the info cache [ 983.009683] env[69328]: DEBUG nova.network.neutron [req-26f8a8f9-343f-4e9d-8215-2a4662d9487d req-17522ea7-3204-4486-93c7-2b110d6aad6e service nova] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.075248] env[69328]: DEBUG oslo_vmware.api [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273733, 'name': CloneVM_Task} progress is 94%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.136617] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273726, 'name': Rename_Task, 'duration_secs': 1.201701} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.139592] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 983.140873] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a1b9360d-eac2-4151-acc3-f1851bb27bdb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.149934] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 983.149934] env[69328]: value = "task-3273737" [ 983.149934] env[69328]: _type = "Task" [ 983.149934] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.163975] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273737, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.207029] env[69328]: INFO nova.compute.manager [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Took 22.78 seconds to build instance. [ 983.226566] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Creating linked-clone VM from snapshot {{(pid=69328) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 983.226874] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-603edfd3-fb61-474b-881d-83db6893dc7a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.248713] env[69328]: DEBUG oslo_vmware.api [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273734, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.513119} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.252568] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] b61436f5-0e8b-4da5-9459-cf9487dfd23f/b61436f5-0e8b-4da5-9459-cf9487dfd23f.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 983.252802] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 983.253116] env[69328]: DEBUG oslo_vmware.api [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 983.253116] env[69328]: value = "task-3273738" [ 983.253116] env[69328]: _type = "Task" [ 983.253116] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.254214] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-aa98dc23-05c0-45ea-b8af-18557f185566 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.265244] env[69328]: DEBUG oslo_concurrency.lockutils [req-bb17828b-85d6-4c8b-b2cf-e85e869ddff0 req-fedb8a5a-9897-4e6f-a326-d020a30ec84c service nova] Releasing lock "refresh_cache-b61436f5-0e8b-4da5-9459-cf9487dfd23f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 983.269770] env[69328]: DEBUG oslo_vmware.api [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273738, 'name': CloneVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.271067] env[69328]: DEBUG oslo_vmware.api [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 983.271067] env[69328]: value = "task-3273739" [ 983.271067] env[69328]: _type = "Task" [ 983.271067] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.285280] env[69328]: DEBUG oslo_vmware.api [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273739, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.406122] env[69328]: DEBUG oslo_vmware.api [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273736, 'name': MoveVirtualDisk_Task} progress is 12%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.430944] env[69328]: DEBUG nova.network.neutron [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Updating instance_info_cache with network_info: [{"id": "d97e62a9-59f8-4f3b-9296-f5a0803d2b10", "address": "fa:16:3e:81:01:fa", "network": {"id": "e2ab6957-2c9d-4b95-91bd-5a62d9a284ba", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-967470666-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75d5853e3c724d02bacfa75173e38ab3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd97e62a9-59", "ovs_interfaceid": "d97e62a9-59f8-4f3b-9296-f5a0803d2b10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.492929] env[69328]: DEBUG nova.network.neutron [-] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.516778] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4f7b3643-9f18-4aca-93a3-85021e9b6fe9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.527577] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f93eaf9-d28d-439c-b521-1de413a54e65 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.590971] env[69328]: DEBUG nova.compute.manager [req-26f8a8f9-343f-4e9d-8215-2a4662d9487d req-17522ea7-3204-4486-93c7-2b110d6aad6e service nova] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Detach interface failed, port_id=f7b2aa6f-1dee-4050-84ad-2ea0e567211b, reason: Instance 772ab9b3-23ac-46c6-acb1-af0b2726fd90 could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 983.597193] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7221efb-27be-4fac-90fd-807cc6c69fd7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.608676] env[69328]: DEBUG oslo_vmware.api [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273733, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.611495] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31bdc979-4353-4142-8f13-64603065c1db {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.647964] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deb74342-b97c-41d3-a418-94b7e152fa46 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.660542] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273737, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.664034] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49668dd9-cd8a-4472-88dd-ad81713a3360 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.679925] env[69328]: DEBUG nova.compute.provider_tree [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 983.709742] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8999829-2087-4c41-b085-d982876cb91b tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lock "52c87371-4142-40d6-ac68-804aabd9f823" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 24.289s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 983.768865] env[69328]: DEBUG oslo_vmware.api [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273738, 'name': CloneVM_Task} progress is 94%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.781242] env[69328]: DEBUG oslo_vmware.api [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273739, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079653} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.781612] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 983.782547] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e18537a-b66a-4b92-8123-f9179fe733ca {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.807440] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] b61436f5-0e8b-4da5-9459-cf9487dfd23f/b61436f5-0e8b-4da5-9459-cf9487dfd23f.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 983.807793] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f40ad1a0-1753-4e3d-aac1-31e5b39f8bbd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.830885] env[69328]: DEBUG oslo_vmware.api [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 983.830885] env[69328]: value = "task-3273740" [ 983.830885] env[69328]: _type = "Task" [ 983.830885] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.841779] env[69328]: DEBUG oslo_vmware.api [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273740, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.906101] env[69328]: DEBUG oslo_vmware.api [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273736, 'name': MoveVirtualDisk_Task} progress is 32%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.934118] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Releasing lock "refresh_cache-96f604a9-e42c-4aa8-b5b5-edcb34901d94" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 983.996145] env[69328]: INFO nova.compute.manager [-] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Took 1.39 seconds to deallocate network for instance. [ 984.101531] env[69328]: DEBUG oslo_vmware.api [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273733, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.161860] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273737, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.183705] env[69328]: DEBUG nova.scheduler.client.report [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 984.269108] env[69328]: DEBUG oslo_vmware.api [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273738, 'name': CloneVM_Task} progress is 94%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.343098] env[69328]: DEBUG oslo_vmware.api [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273740, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.406842] env[69328]: DEBUG oslo_vmware.api [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273736, 'name': MoveVirtualDisk_Task} progress is 52%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.505701] env[69328]: DEBUG oslo_concurrency.lockutils [None req-65bf4397-837d-43f7-abf5-96e2a73bb4ae tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 984.601232] env[69328]: DEBUG oslo_vmware.api [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273733, 'name': CloneVM_Task} progress is 94%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.663018] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273737, 'name': PowerOnVM_Task} progress is 71%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.689205] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.589s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 984.689910] env[69328]: DEBUG nova.compute.manager [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 984.692875] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.171s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 984.695430] env[69328]: INFO nova.compute.claims [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 984.772363] env[69328]: DEBUG oslo_vmware.api [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273738, 'name': CloneVM_Task} progress is 95%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.844031] env[69328]: DEBUG oslo_vmware.api [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273740, 'name': ReconfigVM_Task, 'duration_secs': 0.532308} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.844393] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Reconfigured VM instance instance-00000058 to attach disk [datastore1] b61436f5-0e8b-4da5-9459-cf9487dfd23f/b61436f5-0e8b-4da5-9459-cf9487dfd23f.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 984.845055] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a22138cc-2402-4433-a3a7-0f8903c9e8c5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.854585] env[69328]: DEBUG oslo_vmware.api [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 984.854585] env[69328]: value = "task-3273741" [ 984.854585] env[69328]: _type = "Task" [ 984.854585] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.864097] env[69328]: DEBUG oslo_vmware.api [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273741, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.907985] env[69328]: DEBUG oslo_vmware.api [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273736, 'name': MoveVirtualDisk_Task} progress is 74%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.093149] env[69328]: DEBUG nova.compute.manager [req-a907f15c-8c99-4cbc-8947-2b5748999e24 req-6a25aba4-c53b-4c72-bf65-932041ab7dfe service nova] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Received event network-changed-7da3de27-ee87-400f-ae26-a3a6995a8363 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 985.093358] env[69328]: DEBUG nova.compute.manager [req-a907f15c-8c99-4cbc-8947-2b5748999e24 req-6a25aba4-c53b-4c72-bf65-932041ab7dfe service nova] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Refreshing instance network info cache due to event network-changed-7da3de27-ee87-400f-ae26-a3a6995a8363. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 985.093581] env[69328]: DEBUG oslo_concurrency.lockutils [req-a907f15c-8c99-4cbc-8947-2b5748999e24 req-6a25aba4-c53b-4c72-bf65-932041ab7dfe service nova] Acquiring lock "refresh_cache-52c87371-4142-40d6-ac68-804aabd9f823" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.093729] env[69328]: DEBUG oslo_concurrency.lockutils [req-a907f15c-8c99-4cbc-8947-2b5748999e24 req-6a25aba4-c53b-4c72-bf65-932041ab7dfe service nova] Acquired lock "refresh_cache-52c87371-4142-40d6-ac68-804aabd9f823" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 985.093909] env[69328]: DEBUG nova.network.neutron [req-a907f15c-8c99-4cbc-8947-2b5748999e24 req-6a25aba4-c53b-4c72-bf65-932041ab7dfe service nova] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Refreshing network info cache for port 7da3de27-ee87-400f-ae26-a3a6995a8363 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 985.106706] env[69328]: DEBUG oslo_vmware.api [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273733, 'name': CloneVM_Task, 'duration_secs': 2.499079} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.107355] env[69328]: INFO nova.virt.vmwareapi.vmops [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Created linked-clone VM from snapshot [ 985.108462] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a86bd8cd-877c-4d2e-a54a-e7241130190f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.119023] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Uploading image 8868d8b6-e8a6-4c40-9bca-fb6ec2c24443 {{(pid=69328) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 985.147874] env[69328]: DEBUG oslo_vmware.rw_handles [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 985.147874] env[69328]: value = "vm-653891" [ 985.147874] env[69328]: _type = "VirtualMachine" [ 985.147874] env[69328]: }. {{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 985.148220] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-182d127d-22c4-4856-ae58-6f8f00d68cc6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.157441] env[69328]: DEBUG oslo_vmware.rw_handles [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Lease: (returnval){ [ 985.157441] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e00afa-b52f-93ea-6ab1-44ef20b6323d" [ 985.157441] env[69328]: _type = "HttpNfcLease" [ 985.157441] env[69328]: } obtained for exporting VM: (result){ [ 985.157441] env[69328]: value = "vm-653891" [ 985.157441] env[69328]: _type = "VirtualMachine" [ 985.157441] env[69328]: }. {{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 985.157868] env[69328]: DEBUG oslo_vmware.api [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for the lease: (returnval){ [ 985.157868] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e00afa-b52f-93ea-6ab1-44ef20b6323d" [ 985.157868] env[69328]: _type = "HttpNfcLease" [ 985.157868] env[69328]: } to be ready. {{(pid=69328) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 985.165711] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273737, 'name': PowerOnVM_Task} progress is 71%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.170162] env[69328]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 985.170162] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e00afa-b52f-93ea-6ab1-44ef20b6323d" [ 985.170162] env[69328]: _type = "HttpNfcLease" [ 985.170162] env[69328]: } is initializing. {{(pid=69328) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 985.201060] env[69328]: DEBUG nova.compute.utils [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 985.206692] env[69328]: DEBUG nova.compute.manager [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 985.207068] env[69328]: DEBUG nova.network.neutron [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 985.249190] env[69328]: DEBUG nova.policy [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0ca24c1b09374feeaec13dfeeaf02d94', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6bad0df17bba4bc996fe5cf1faf23fad', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 985.269689] env[69328]: DEBUG oslo_vmware.api [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273738, 'name': CloneVM_Task, 'duration_secs': 1.660339} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.270125] env[69328]: INFO nova.virt.vmwareapi.vmops [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Created linked-clone VM from snapshot [ 985.270922] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-622199a4-ce04-4f0f-b96e-a9c846b6c20a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.279853] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Uploading image 83b1e553-81a0-4dcf-a9f7-df6e5e0289ab {{(pid=69328) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 985.304051] env[69328]: DEBUG oslo_vmware.rw_handles [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 985.304051] env[69328]: value = "vm-653892" [ 985.304051] env[69328]: _type = "VirtualMachine" [ 985.304051] env[69328]: }. {{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 985.304724] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-e6a6a944-2166-4f53-86a7-d9ec19cd1d69 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.315415] env[69328]: DEBUG oslo_vmware.rw_handles [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lease: (returnval){ [ 985.315415] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]527db3fb-5b97-aef6-8c94-9754ff1d783b" [ 985.315415] env[69328]: _type = "HttpNfcLease" [ 985.315415] env[69328]: } obtained for exporting VM: (result){ [ 985.315415] env[69328]: value = "vm-653892" [ 985.315415] env[69328]: _type = "VirtualMachine" [ 985.315415] env[69328]: }. {{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 985.316254] env[69328]: DEBUG oslo_vmware.api [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the lease: (returnval){ [ 985.316254] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]527db3fb-5b97-aef6-8c94-9754ff1d783b" [ 985.316254] env[69328]: _type = "HttpNfcLease" [ 985.316254] env[69328]: } to be ready. {{(pid=69328) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 985.333023] env[69328]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 985.333023] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]527db3fb-5b97-aef6-8c94-9754ff1d783b" [ 985.333023] env[69328]: _type = "HttpNfcLease" [ 985.333023] env[69328]: } is ready. 
{{(pid=69328) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 985.333023] env[69328]: DEBUG oslo_vmware.rw_handles [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 985.333023] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]527db3fb-5b97-aef6-8c94-9754ff1d783b" [ 985.333023] env[69328]: _type = "HttpNfcLease" [ 985.333023] env[69328]: }. {{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 985.333827] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6330c36d-3844-468b-b754-59b9417b3108 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.343575] env[69328]: DEBUG oslo_vmware.rw_handles [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ee239d-5169-dbcf-9db8-a7d1898b740d/disk-0.vmdk from lease info. {{(pid=69328) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 985.343938] env[69328]: DEBUG oslo_vmware.rw_handles [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ee239d-5169-dbcf-9db8-a7d1898b740d/disk-0.vmdk for reading. {{(pid=69328) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 985.425150] env[69328]: DEBUG oslo_vmware.api [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273736, 'name': MoveVirtualDisk_Task} progress is 94%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.428678] env[69328]: DEBUG oslo_vmware.api [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273741, 'name': Rename_Task, 'duration_secs': 0.257381} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.429039] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 985.429315] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-67b01991-08d3-464b-99c5-554067fc7ffe {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.438070] env[69328]: DEBUG oslo_vmware.api [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 985.438070] env[69328]: value = "task-3273744" [ 985.438070] env[69328]: _type = "Task" [ 985.438070] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.451916] env[69328]: DEBUG oslo_vmware.api [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273744, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.453337] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d48b2ee-1363-4d08-a2f9-6c8be173e932 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.459425] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-5ddd9ec8-2e36-4101-b7c4-44891622a5fa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.476204] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Updating instance '96f604a9-e42c-4aa8-b5b5-edcb34901d94' progress to 0 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 985.648779] env[69328]: DEBUG nova.network.neutron [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Successfully created port: b7d14440-051d-478f-8bda-be652bd1f72f {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 985.666736] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273737, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.670826] env[69328]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 985.670826] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e00afa-b52f-93ea-6ab1-44ef20b6323d" [ 985.670826] env[69328]: _type = "HttpNfcLease" [ 985.670826] env[69328]: } is ready. 
{{(pid=69328) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 985.671233] env[69328]: DEBUG oslo_vmware.rw_handles [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 985.671233] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e00afa-b52f-93ea-6ab1-44ef20b6323d" [ 985.671233] env[69328]: _type = "HttpNfcLease" [ 985.671233] env[69328]: }. {{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 985.671973] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1d1d0ca-ec1a-4209-ab9a-35de75027365 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.680608] env[69328]: DEBUG oslo_vmware.rw_handles [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52842b21-fb0c-9b7e-108a-2c49e2ad5b0b/disk-0.vmdk from lease info. {{(pid=69328) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 985.680796] env[69328]: DEBUG oslo_vmware.rw_handles [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52842b21-fb0c-9b7e-108a-2c49e2ad5b0b/disk-0.vmdk for reading. {{(pid=69328) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 985.737601] env[69328]: DEBUG nova.compute.manager [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 985.869691] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-399f4293-cc02-4351-9edc-5b39bf0294b9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.920190] env[69328]: DEBUG oslo_vmware.api [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273736, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.728878} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.920462] env[69328]: INFO nova.virt.vmwareapi.ds_util [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_3be16e45-d79e-4346-9dee-4e0c5b2d2252/OSTACK_IMG_3be16e45-d79e-4346-9dee-4e0c5b2d2252.vmdk to [datastore2] devstack-image-cache_base/44b9c7fb-3d5c-4708-a4ae-a8a2aba3a7ca/44b9c7fb-3d5c-4708-a4ae-a8a2aba3a7ca.vmdk. 
[ 985.920645] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Cleaning up location [datastore2] OSTACK_IMG_3be16e45-d79e-4346-9dee-4e0c5b2d2252 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 985.920802] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_3be16e45-d79e-4346-9dee-4e0c5b2d2252 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 985.921074] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ffd5b27d-bf93-445f-8fb5-07d2ca48789c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.929251] env[69328]: DEBUG oslo_vmware.api [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 985.929251] env[69328]: value = "task-3273745" [ 985.929251] env[69328]: _type = "Task" [ 985.929251] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.941406] env[69328]: DEBUG oslo_vmware.api [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273745, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.950778] env[69328]: DEBUG oslo_vmware.api [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273744, 'name': PowerOnVM_Task} progress is 78%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.986660] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 985.987014] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b3ce6c2a-3b7a-4d38-bd81-92094f13cae1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.002249] env[69328]: DEBUG oslo_vmware.api [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 986.002249] env[69328]: value = "task-3273746" [ 986.002249] env[69328]: _type = "Task" [ 986.002249] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.010951] env[69328]: DEBUG oslo_vmware.api [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273746, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.167600] env[69328]: DEBUG oslo_vmware.api [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273737, 'name': PowerOnVM_Task, 'duration_secs': 2.601762} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.170695] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 986.170935] env[69328]: DEBUG nova.compute.manager [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 986.172495] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-990821b4-d4b5-41c1-b90f-78efcdc72eb6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.223178] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7ac19bf-9144-426a-846f-821d3edb5fa7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.228708] env[69328]: DEBUG nova.network.neutron [req-a907f15c-8c99-4cbc-8947-2b5748999e24 req-6a25aba4-c53b-4c72-bf65-932041ab7dfe service nova] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Updated VIF entry in instance network info cache for port 7da3de27-ee87-400f-ae26-a3a6995a8363. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 986.229142] env[69328]: DEBUG nova.network.neutron [req-a907f15c-8c99-4cbc-8947-2b5748999e24 req-6a25aba4-c53b-4c72-bf65-932041ab7dfe service nova] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Updating instance_info_cache with network_info: [{"id": "7da3de27-ee87-400f-ae26-a3a6995a8363", "address": "fa:16:3e:91:9b:b5", "network": {"id": "bd41ac10-1850-45a2-8e46-6ebdca3d8e13", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-766393176-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.150", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efd0e2d2f9ba4416bd8fd08dad912465", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7da3de27-ee", "ovs_interfaceid": "7da3de27-ee87-400f-ae26-a3a6995a8363", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 986.240034] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd1e721a-13b4-4963-923d-dcaf5cbc9ab2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.288174] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ec23836-0ea9-4d63-8110-9268843fbaf3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.299015] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd5f65cc-f7a8-400d-8161-bd4f9866bf91 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.321919] env[69328]: DEBUG nova.compute.provider_tree [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 986.443073] env[69328]: DEBUG oslo_vmware.api [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273745, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.109149} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.443073] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 986.443073] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Releasing lock "[datastore2] devstack-image-cache_base/44b9c7fb-3d5c-4708-a4ae-a8a2aba3a7ca/44b9c7fb-3d5c-4708-a4ae-a8a2aba3a7ca.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 986.443073] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/44b9c7fb-3d5c-4708-a4ae-a8a2aba3a7ca/44b9c7fb-3d5c-4708-a4ae-a8a2aba3a7ca.vmdk to [datastore2] 51a9c492-6f91-4186-b550-ef12284b8a84/51a9c492-6f91-4186-b550-ef12284b8a84.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 986.446896] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9c092676-e05b-4989-88b3-b0028664ede2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.456131] env[69328]: DEBUG oslo_vmware.api [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273744, 'name': PowerOnVM_Task, 'duration_secs': 0.916403} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.459670] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 986.459670] env[69328]: INFO nova.compute.manager [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Took 6.83 seconds to spawn the instance on the hypervisor. [ 986.460318] env[69328]: DEBUG nova.compute.manager [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 986.460844] env[69328]: DEBUG oslo_vmware.api [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 986.460844] env[69328]: value = "task-3273747" [ 986.460844] env[69328]: _type = "Task" [ 986.460844] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.461789] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cc7a001-e405-44f9-82f5-7b5c0149c69c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.480134] env[69328]: DEBUG oslo_vmware.api [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273747, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.515679] env[69328]: DEBUG oslo_vmware.api [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273746, 'name': PowerOffVM_Task, 'duration_secs': 0.237916} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.520064] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 986.521274] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Updating instance '96f604a9-e42c-4aa8-b5b5-edcb34901d94' progress to 17 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 986.693406] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 986.732607] env[69328]: DEBUG oslo_concurrency.lockutils [req-a907f15c-8c99-4cbc-8947-2b5748999e24 req-6a25aba4-c53b-4c72-bf65-932041ab7dfe service nova] Releasing lock "refresh_cache-52c87371-4142-40d6-ac68-804aabd9f823" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 986.753315] env[69328]: DEBUG nova.compute.manager [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 986.826555] env[69328]: DEBUG nova.scheduler.client.report [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 986.978577] env[69328]: DEBUG oslo_vmware.api [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273747, 'name': CopyVirtualDisk_Task} progress is 9%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.987914] env[69328]: INFO nova.compute.manager [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Took 25.53 seconds to build instance. [ 987.029337] env[69328]: DEBUG nova.virt.hardware [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:34:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 987.030271] env[69328]: DEBUG nova.virt.hardware [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 987.030939] env[69328]: DEBUG nova.virt.hardware [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 987.031166] env[69328]: DEBUG nova.virt.hardware [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 987.031357] env[69328]: DEBUG nova.virt.hardware [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Image pref 0:0:0 {{(pid=69328) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 987.031576] env[69328]: DEBUG nova.virt.hardware [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 987.031840] env[69328]: DEBUG nova.virt.hardware [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 987.032033] env[69328]: DEBUG nova.virt.hardware [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 987.032636] env[69328]: DEBUG nova.virt.hardware [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 987.032836] env[69328]: DEBUG nova.virt.hardware [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 987.033277] env[69328]: DEBUG nova.virt.hardware [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 987.038942] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8f46ad13-f640-4607-9231-f24add165238 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.058676] env[69328]: DEBUG oslo_vmware.api [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 987.058676] env[69328]: value = "task-3273748" [ 987.058676] env[69328]: _type = "Task" [ 987.058676] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.072826] env[69328]: DEBUG oslo_vmware.api [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273748, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.269566] env[69328]: DEBUG nova.compute.manager [req-a0cba82d-8a4b-4c04-aa95-93472d831aaf req-59cb8fa1-6881-457c-afc8-1111bca6f196 service nova] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Received event network-vif-plugged-b7d14440-051d-478f-8bda-be652bd1f72f {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 987.270360] env[69328]: DEBUG oslo_concurrency.lockutils [req-a0cba82d-8a4b-4c04-aa95-93472d831aaf req-59cb8fa1-6881-457c-afc8-1111bca6f196 service nova] Acquiring lock "9f6f8e97-cb21-4984-af08-a63ea4578eef-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 987.270815] env[69328]: DEBUG oslo_concurrency.lockutils [req-a0cba82d-8a4b-4c04-aa95-93472d831aaf req-59cb8fa1-6881-457c-afc8-1111bca6f196 service nova] Lock "9f6f8e97-cb21-4984-af08-a63ea4578eef-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 987.270997] env[69328]: DEBUG oslo_concurrency.lockutils [req-a0cba82d-8a4b-4c04-aa95-93472d831aaf req-59cb8fa1-6881-457c-afc8-1111bca6f196 service nova] Lock "9f6f8e97-cb21-4984-af08-a63ea4578eef-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 987.271879] env[69328]: DEBUG nova.compute.manager [req-a0cba82d-8a4b-4c04-aa95-93472d831aaf req-59cb8fa1-6881-457c-afc8-1111bca6f196 service nova] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] No waiting events found dispatching network-vif-plugged-b7d14440-051d-478f-8bda-be652bd1f72f {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 987.272207] env[69328]: WARNING nova.compute.manager [req-a0cba82d-8a4b-4c04-aa95-93472d831aaf req-59cb8fa1-6881-457c-afc8-1111bca6f196 service nova] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Received unexpected event network-vif-plugged-b7d14440-051d-478f-8bda-be652bd1f72f for instance with vm_state building and task_state spawning. [ 987.333919] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.641s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 987.335289] env[69328]: DEBUG nova.compute.manager [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 987.340249] env[69328]: DEBUG oslo_concurrency.lockutils [None req-19c2c942-1425-475c-9f96-3dfa57ef38cb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.793s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 987.340890] env[69328]: DEBUG nova.objects.instance [None req-19c2c942-1425-475c-9f96-3dfa57ef38cb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lazy-loading 'resources' on Instance uuid 36f6aab5-2774-402b-9db6-9912f2d5d473 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 987.480152] env[69328]: DEBUG oslo_vmware.api [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273747, 'name': CopyVirtualDisk_Task} progress is 26%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.483737] env[69328]: DEBUG nova.network.neutron [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Successfully updated port: b7d14440-051d-478f-8bda-be652bd1f72f {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 987.491459] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e3e18147-23ab-49ab-8496-4c109e41e077 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "b61436f5-0e8b-4da5-9459-cf9487dfd23f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.048s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 987.571552] env[69328]: DEBUG oslo_vmware.api [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273748, 'name': ReconfigVM_Task, 'duration_secs': 0.467072} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.572343] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Updating instance '96f604a9-e42c-4aa8-b5b5-edcb34901d94' progress to 33 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 987.850503] env[69328]: DEBUG nova.compute.utils [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 987.852153] env[69328]: DEBUG nova.compute.manager [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 987.853379] env[69328]: DEBUG nova.network.neutron [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 987.905437] env[69328]: DEBUG nova.policy [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '894cc1336f714e29b2a87bf256a85a21', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '38528276c7744d798af4057d29c88ddb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 987.989113] env[69328]: DEBUG oslo_vmware.api [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273747, 'name': CopyVirtualDisk_Task} progress is 46%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.995136] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquiring lock "refresh_cache-9f6f8e97-cb21-4984-af08-a63ea4578eef" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.995492] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquired lock "refresh_cache-9f6f8e97-cb21-4984-af08-a63ea4578eef" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 987.995861] env[69328]: DEBUG nova.network.neutron [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 988.080640] env[69328]: DEBUG nova.virt.hardware [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 988.081317] 
env[69328]: DEBUG nova.virt.hardware [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 988.081699] env[69328]: DEBUG nova.virt.hardware [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 988.082026] env[69328]: DEBUG nova.virt.hardware [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 988.082123] env[69328]: DEBUG nova.virt.hardware [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 988.082260] env[69328]: DEBUG nova.virt.hardware [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 988.082485] env[69328]: DEBUG nova.virt.hardware [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 988.082681] env[69328]: DEBUG nova.virt.hardware [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 988.082913] env[69328]: DEBUG nova.virt.hardware [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 988.083275] env[69328]: DEBUG nova.virt.hardware [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 988.083373] env[69328]: DEBUG nova.virt.hardware [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 988.089477] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 
tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Reconfiguring VM instance instance-00000051 to detach disk 2000 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 988.093741] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fa639aec-403c-4cda-b978-c5e877a21033 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.118826] env[69328]: DEBUG oslo_vmware.api [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 988.118826] env[69328]: value = "task-3273749" [ 988.118826] env[69328]: _type = "Task" [ 988.118826] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.131417] env[69328]: DEBUG oslo_vmware.api [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273749, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.356252] env[69328]: DEBUG nova.compute.manager [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 988.381331] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d144049a-58dc-4fa4-8176-341407c11935 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.391947] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb0a91d2-6cad-448e-81f3-3aa2988090a6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.433764] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b93e848-4fba-4ca8-9654-efb221f87826 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.445386] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a840ea7-ba62-48b3-abce-9b6d42b96bed {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.464361] env[69328]: DEBUG nova.compute.provider_tree [None req-19c2c942-1425-475c-9f96-3dfa57ef38cb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 988.485345] env[69328]: DEBUG oslo_vmware.api [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273747, 'name': CopyVirtualDisk_Task} progress is 66%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.499611] env[69328]: DEBUG nova.network.neutron [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Successfully created port: 3b413041-b9e3-47e2-a4f8-f828e31f079a {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 988.527490] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30418418-2f86-400f-8787-d406062541a6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.539021] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-947e4792-fe0c-4ba6-aaba-916c816893ee tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Suspending the VM {{(pid=69328) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 988.539724] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-748b1eca-804e-4c1c-b154-7697bf5dc7a6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.552037] env[69328]: DEBUG oslo_vmware.api [None req-947e4792-fe0c-4ba6-aaba-916c816893ee tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 988.552037] env[69328]: value = "task-3273750" [ 988.552037] env[69328]: _type = "Task" [ 988.552037] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.567416] env[69328]: DEBUG oslo_vmware.api [None req-947e4792-fe0c-4ba6-aaba-916c816893ee tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273750, 'name': SuspendVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.571529] env[69328]: DEBUG nova.network.neutron [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 988.632655] env[69328]: DEBUG oslo_vmware.api [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273749, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.797607] env[69328]: DEBUG nova.network.neutron [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Updating instance_info_cache with network_info: [{"id": "b7d14440-051d-478f-8bda-be652bd1f72f", "address": "fa:16:3e:f6:20:64", "network": {"id": "77f17547-8c62-4a31-9840-8d2cbb1bfbab", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-163692077-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bad0df17bba4bc996fe5cf1faf23fad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7d14440-05", "ovs_interfaceid": "b7d14440-051d-478f-8bda-be652bd1f72f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 988.972331] env[69328]: DEBUG nova.scheduler.client.report [None req-19c2c942-1425-475c-9f96-3dfa57ef38cb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 988.989029] env[69328]: DEBUG oslo_vmware.api [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273747, 'name': CopyVirtualDisk_Task} progress is 85%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.062898] env[69328]: DEBUG oslo_vmware.api [None req-947e4792-fe0c-4ba6-aaba-916c816893ee tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273750, 'name': SuspendVM_Task} progress is 50%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.133246] env[69328]: DEBUG oslo_vmware.api [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273749, 'name': ReconfigVM_Task, 'duration_secs': 0.539497} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.133613] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Reconfigured VM instance instance-00000051 to detach disk 2000 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 989.134598] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f721dec1-683e-4920-99d3-ca66bcbd6135 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.161270] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] 96f604a9-e42c-4aa8-b5b5-edcb34901d94/96f604a9-e42c-4aa8-b5b5-edcb34901d94.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 989.161270] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4c3623e4-23d9-4f16-ad60-2bdf990a94c7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.182296] env[69328]: DEBUG oslo_vmware.api [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 989.182296] env[69328]: value = "task-3273751" [ 989.182296] env[69328]: _type = "Task" [ 989.182296] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.192041] env[69328]: DEBUG oslo_vmware.api [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273751, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.302206] env[69328]: DEBUG nova.compute.manager [req-33d558d4-e84e-4a0c-b259-eff654f4e70f req-d25cce20-e7a3-43b4-a7df-7501a69bf9b8 service nova] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Received event network-changed-b7d14440-051d-478f-8bda-be652bd1f72f {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 989.302206] env[69328]: DEBUG nova.compute.manager [req-33d558d4-e84e-4a0c-b259-eff654f4e70f req-d25cce20-e7a3-43b4-a7df-7501a69bf9b8 service nova] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Refreshing instance network info cache due to event network-changed-b7d14440-051d-478f-8bda-be652bd1f72f. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 989.302401] env[69328]: DEBUG oslo_concurrency.lockutils [req-33d558d4-e84e-4a0c-b259-eff654f4e70f req-d25cce20-e7a3-43b4-a7df-7501a69bf9b8 service nova] Acquiring lock "refresh_cache-9f6f8e97-cb21-4984-af08-a63ea4578eef" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 989.303313] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Releasing lock "refresh_cache-9f6f8e97-cb21-4984-af08-a63ea4578eef" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 989.303313] env[69328]: DEBUG nova.compute.manager [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Instance network_info: |[{"id": "b7d14440-051d-478f-8bda-be652bd1f72f", "address": "fa:16:3e:f6:20:64", "network": {"id": "77f17547-8c62-4a31-9840-8d2cbb1bfbab", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-163692077-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bad0df17bba4bc996fe5cf1faf23fad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7d14440-05", "ovs_interfaceid": "b7d14440-051d-478f-8bda-be652bd1f72f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 989.303948] env[69328]: DEBUG oslo_concurrency.lockutils [req-33d558d4-e84e-4a0c-b259-eff654f4e70f req-d25cce20-e7a3-43b4-a7df-7501a69bf9b8 service nova] Acquired lock "refresh_cache-9f6f8e97-cb21-4984-af08-a63ea4578eef" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 989.304196] env[69328]: DEBUG nova.network.neutron [req-33d558d4-e84e-4a0c-b259-eff654f4e70f req-d25cce20-e7a3-43b4-a7df-7501a69bf9b8 service nova] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Refreshing network info cache for port b7d14440-051d-478f-8bda-be652bd1f72f {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 989.370232] env[69328]: DEBUG nova.compute.manager [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 989.483084] env[69328]: DEBUG oslo_concurrency.lockutils [None req-19c2c942-1425-475c-9f96-3dfa57ef38cb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.143s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 989.490034] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9fff04c7-c0f7-43d9-9f9e-4f1b4cddc38c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.474s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 989.490458] env[69328]: DEBUG nova.objects.instance [None req-9fff04c7-c0f7-43d9-9f9e-4f1b4cddc38c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lazy-loading 'resources' on Instance uuid 4d320c76-45bb-451c-8fbb-3dd2d64f56d5 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 989.491635] env[69328]: DEBUG oslo_vmware.api [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273747, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.875469} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.492170] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/44b9c7fb-3d5c-4708-a4ae-a8a2aba3a7ca/44b9c7fb-3d5c-4708-a4ae-a8a2aba3a7ca.vmdk to [datastore2] 51a9c492-6f91-4186-b550-ef12284b8a84/51a9c492-6f91-4186-b550-ef12284b8a84.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 989.493137] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cd48ec2-ae97-4d60-8823-e8e7f9b90778 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.511051] env[69328]: DEBUG oslo_concurrency.lockutils [None req-181903c3-ccac-4553-aa0a-ffb84d6ca8e7 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "f1be93b2-08db-41fe-87c4-f4e5f964cfa4" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 989.511320] env[69328]: DEBUG oslo_concurrency.lockutils [None req-181903c3-ccac-4553-aa0a-ffb84d6ca8e7 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "f1be93b2-08db-41fe-87c4-f4e5f964cfa4" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 989.511756] env[69328]: INFO nova.compute.manager [None req-181903c3-ccac-4553-aa0a-ffb84d6ca8e7 tempest-ServerActionsTestJSON-1885345193 
tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Rebooting instance [ 989.522879] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Reconfiguring VM instance instance-00000057 to attach disk [datastore2] 51a9c492-6f91-4186-b550-ef12284b8a84/51a9c492-6f91-4186-b550-ef12284b8a84.vmdk or device None with type streamOptimized {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 989.524075] env[69328]: INFO nova.scheduler.client.report [None req-19c2c942-1425-475c-9f96-3dfa57ef38cb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Deleted allocations for instance 36f6aab5-2774-402b-9db6-9912f2d5d473 [ 989.528888] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3efa17fd-13fd-42e9-8228-2e853ba70896 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.559138] env[69328]: DEBUG oslo_vmware.api [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 989.559138] env[69328]: value = "task-3273752" [ 989.559138] env[69328]: _type = "Task" [ 989.559138] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.566685] env[69328]: DEBUG oslo_vmware.api [None req-947e4792-fe0c-4ba6-aaba-916c816893ee tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273750, 'name': SuspendVM_Task, 'duration_secs': 0.993688} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.568132] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-947e4792-fe0c-4ba6-aaba-916c816893ee tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Suspended the VM {{(pid=69328) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 989.568889] env[69328]: DEBUG nova.compute.manager [None req-947e4792-fe0c-4ba6-aaba-916c816893ee tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 989.569563] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00523285-1a53-4868-824a-200ff44b095f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.577071] env[69328]: DEBUG oslo_vmware.api [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273752, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.693665] env[69328]: DEBUG oslo_vmware.api [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273751, 'name': ReconfigVM_Task, 'duration_secs': 0.409206} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.694026] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Reconfigured VM instance instance-00000051 to attach disk [datastore2] 96f604a9-e42c-4aa8-b5b5-edcb34901d94/96f604a9-e42c-4aa8-b5b5-edcb34901d94.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 989.695041] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Updating instance '96f604a9-e42c-4aa8-b5b5-edcb34901d94' progress to 50 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 990.032879] env[69328]: DEBUG nova.network.neutron [req-33d558d4-e84e-4a0c-b259-eff654f4e70f req-d25cce20-e7a3-43b4-a7df-7501a69bf9b8 service nova] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Updated VIF entry in instance network info cache for port b7d14440-051d-478f-8bda-be652bd1f72f. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 990.033517] env[69328]: DEBUG nova.network.neutron [req-33d558d4-e84e-4a0c-b259-eff654f4e70f req-d25cce20-e7a3-43b4-a7df-7501a69bf9b8 service nova] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Updating instance_info_cache with network_info: [{"id": "b7d14440-051d-478f-8bda-be652bd1f72f", "address": "fa:16:3e:f6:20:64", "network": {"id": "77f17547-8c62-4a31-9840-8d2cbb1bfbab", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-163692077-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bad0df17bba4bc996fe5cf1faf23fad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7d14440-05", "ovs_interfaceid": "b7d14440-051d-478f-8bda-be652bd1f72f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 990.043305] env[69328]: DEBUG oslo_concurrency.lockutils [None req-181903c3-ccac-4553-aa0a-ffb84d6ca8e7 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] 
Acquiring lock "refresh_cache-f1be93b2-08db-41fe-87c4-f4e5f964cfa4" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.043495] env[69328]: DEBUG oslo_concurrency.lockutils [None req-181903c3-ccac-4553-aa0a-ffb84d6ca8e7 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquired lock "refresh_cache-f1be93b2-08db-41fe-87c4-f4e5f964cfa4" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 990.043683] env[69328]: DEBUG nova.network.neutron [None req-181903c3-ccac-4553-aa0a-ffb84d6ca8e7 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 990.056218] env[69328]: DEBUG oslo_concurrency.lockutils [None req-19c2c942-1425-475c-9f96-3dfa57ef38cb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "36f6aab5-2774-402b-9db6-9912f2d5d473" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.078s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 990.074176] env[69328]: DEBUG oslo_vmware.api [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273752, 'name': ReconfigVM_Task, 'duration_secs': 0.317} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.074465] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Reconfigured VM instance instance-00000057 to attach disk [datastore2] 51a9c492-6f91-4186-b550-ef12284b8a84/51a9c492-6f91-4186-b550-ef12284b8a84.vmdk or device None with type streamOptimized {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 990.075165] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-576ed761-fe1c-46a1-872a-fd1a30e7ee5a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.090612] env[69328]: DEBUG oslo_vmware.api [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 990.090612] env[69328]: value = "task-3273753" [ 990.090612] env[69328]: _type = "Task" [ 990.090612] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.105507] env[69328]: DEBUG oslo_vmware.api [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273753, 'name': Rename_Task} progress is 6%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.211600] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ec738dc-5d3e-47b1-bfa6-805ddb25dd77 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.214038] env[69328]: DEBUG nova.network.neutron [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Successfully updated port: 3b413041-b9e3-47e2-a4f8-f828e31f079a {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 990.249195] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f32a920-1781-4200-bb4c-f36619ee1d24 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.280317] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Updating instance '96f604a9-e42c-4aa8-b5b5-edcb34901d94' progress to 67 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 990.455173] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-949e93f1-0447-49de-9596-e6dba8b523a5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.464923] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b2c2f6d-30a0-4f1b-b546-56e007a0ef9f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.500103] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8dd4366-6c21-412a-ad54-54e9ab8e634b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.508692] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d220ec6-50d2-4e14-af1d-d77549b4eb95 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.531604] env[69328]: DEBUG nova.compute.provider_tree [None req-9fff04c7-c0f7-43d9-9f9e-4f1b4cddc38c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 990.538740] env[69328]: DEBUG oslo_concurrency.lockutils [req-33d558d4-e84e-4a0c-b259-eff654f4e70f req-d25cce20-e7a3-43b4-a7df-7501a69bf9b8 service nova] Releasing lock "refresh_cache-9f6f8e97-cb21-4984-af08-a63ea4578eef" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 990.608127] env[69328]: DEBUG oslo_vmware.api [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273753, 'name': Rename_Task, 'duration_secs': 0.15427} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.608477] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 990.608785] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-75b87708-f415-4aad-95e6-68a1990008ce {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.618475] env[69328]: DEBUG oslo_vmware.api [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 990.618475] env[69328]: value = "task-3273754" [ 990.618475] env[69328]: _type = "Task" [ 990.618475] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.630275] env[69328]: DEBUG oslo_vmware.api [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273754, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.716751] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Acquiring lock "refresh_cache-65fccb3f-5e0e-4140-be0a-5ba20f494d50" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.717011] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Acquired lock "refresh_cache-65fccb3f-5e0e-4140-be0a-5ba20f494d50" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 990.717249] env[69328]: DEBUG nova.network.neutron [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 990.865117] env[69328]: DEBUG nova.network.neutron [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Port d97e62a9-59f8-4f3b-9296-f5a0803d2b10 binding to destination host cpu-1 is already ACTIVE {{(pid=69328) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 991.020078] env[69328]: DEBUG oslo_concurrency.lockutils [None req-97563d6f-f1dd-4a10-8a2a-3d9aabc95961 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "b61436f5-0e8b-4da5-9459-cf9487dfd23f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 991.020078] env[69328]: DEBUG oslo_concurrency.lockutils [None 
req-97563d6f-f1dd-4a10-8a2a-3d9aabc95961 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "b61436f5-0e8b-4da5-9459-cf9487dfd23f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 991.020078] env[69328]: DEBUG oslo_concurrency.lockutils [None req-97563d6f-f1dd-4a10-8a2a-3d9aabc95961 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "b61436f5-0e8b-4da5-9459-cf9487dfd23f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 991.020078] env[69328]: DEBUG oslo_concurrency.lockutils [None req-97563d6f-f1dd-4a10-8a2a-3d9aabc95961 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "b61436f5-0e8b-4da5-9459-cf9487dfd23f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 991.020078] env[69328]: DEBUG oslo_concurrency.lockutils [None req-97563d6f-f1dd-4a10-8a2a-3d9aabc95961 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "b61436f5-0e8b-4da5-9459-cf9487dfd23f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 991.023503] env[69328]: INFO nova.compute.manager [None req-97563d6f-f1dd-4a10-8a2a-3d9aabc95961 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Terminating instance [ 991.035160] env[69328]: DEBUG nova.scheduler.client.report [None req-9fff04c7-c0f7-43d9-9f9e-4f1b4cddc38c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 991.131642] env[69328]: DEBUG oslo_vmware.api [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273754, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.227101] env[69328]: DEBUG nova.network.neutron [None req-181903c3-ccac-4553-aa0a-ffb84d6ca8e7 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Updating instance_info_cache with network_info: [{"id": "1018560a-13d7-4d01-8fc4-03d0b9beab90", "address": "fa:16:3e:33:ba:27", "network": {"id": "4031def8-0553-461f-9528-aa338b9d7b2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1834835647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f357b5a9494b4849a83aa934c5d4e26b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "357d2811-e990-4985-9f9e-b158d10d3699", "external-id": "nsx-vlan-transportzone-641", "segmentation_id": 641, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1018560a-13", "ovs_interfaceid": "1018560a-13d7-4d01-8fc4-03d0b9beab90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 991.280227] env[69328]: DEBUG nova.network.neutron [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 991.400624] env[69328]: DEBUG nova.compute.manager [req-17c1bcd5-6000-452e-b2e0-ba37ba971115 req-cb622e2e-59c7-4a49-ae9b-6090ac21c142 service nova] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Received event network-vif-plugged-3b413041-b9e3-47e2-a4f8-f828e31f079a {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 991.400757] env[69328]: DEBUG oslo_concurrency.lockutils [req-17c1bcd5-6000-452e-b2e0-ba37ba971115 req-cb622e2e-59c7-4a49-ae9b-6090ac21c142 service nova] Acquiring lock "65fccb3f-5e0e-4140-be0a-5ba20f494d50-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 991.401216] env[69328]: DEBUG oslo_concurrency.lockutils [req-17c1bcd5-6000-452e-b2e0-ba37ba971115 req-cb622e2e-59c7-4a49-ae9b-6090ac21c142 service nova] Lock "65fccb3f-5e0e-4140-be0a-5ba20f494d50-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 991.401436] env[69328]: DEBUG oslo_concurrency.lockutils [req-17c1bcd5-6000-452e-b2e0-ba37ba971115 req-cb622e2e-59c7-4a49-ae9b-6090ac21c142 service nova] Lock "65fccb3f-5e0e-4140-be0a-5ba20f494d50-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 991.401643] env[69328]: DEBUG nova.compute.manager [req-17c1bcd5-6000-452e-b2e0-ba37ba971115 req-cb622e2e-59c7-4a49-ae9b-6090ac21c142 service nova] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] No waiting events found dispatching network-vif-plugged-3b413041-b9e3-47e2-a4f8-f828e31f079a {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 991.401919] env[69328]: WARNING nova.compute.manager [req-17c1bcd5-6000-452e-b2e0-ba37ba971115 req-cb622e2e-59c7-4a49-ae9b-6090ac21c142 service nova] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Received unexpected event network-vif-plugged-3b413041-b9e3-47e2-a4f8-f828e31f079a for instance with vm_state building and task_state spawning. [ 991.402107] env[69328]: DEBUG nova.compute.manager [req-17c1bcd5-6000-452e-b2e0-ba37ba971115 req-cb622e2e-59c7-4a49-ae9b-6090ac21c142 service nova] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Received event network-changed-3b413041-b9e3-47e2-a4f8-f828e31f079a {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 991.402308] env[69328]: DEBUG nova.compute.manager [req-17c1bcd5-6000-452e-b2e0-ba37ba971115 req-cb622e2e-59c7-4a49-ae9b-6090ac21c142 service nova] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Refreshing instance network info cache due to event network-changed-3b413041-b9e3-47e2-a4f8-f828e31f079a. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 991.402493] env[69328]: DEBUG oslo_concurrency.lockutils [req-17c1bcd5-6000-452e-b2e0-ba37ba971115 req-cb622e2e-59c7-4a49-ae9b-6090ac21c142 service nova] Acquiring lock "refresh_cache-65fccb3f-5e0e-4140-be0a-5ba20f494d50" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.528955] env[69328]: DEBUG nova.compute.manager [None req-97563d6f-f1dd-4a10-8a2a-3d9aabc95961 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 991.529404] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-97563d6f-f1dd-4a10-8a2a-3d9aabc95961 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 991.530236] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91bbf95f-3c4a-4a9b-bc4d-53aae479786c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.539891] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-97563d6f-f1dd-4a10-8a2a-3d9aabc95961 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 991.540195] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-83a303ae-b2af-447f-a128-23d99bd21d27 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.542415] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9fff04c7-c0f7-43d9-9f9e-4f1b4cddc38c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.053s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 991.544648] env[69328]: DEBUG oslo_concurrency.lockutils [None req-dfb65d93-8fb4-40e0-8193-169d3590a0ed tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.047s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 991.544911] env[69328]: DEBUG nova.objects.instance [None req-dfb65d93-8fb4-40e0-8193-169d3590a0ed tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Lazy-loading 'resources' on Instance uuid 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 991.574155] env[69328]: INFO nova.scheduler.client.report [None req-9fff04c7-c0f7-43d9-9f9e-4f1b4cddc38c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Deleted allocations for instance 4d320c76-45bb-451c-8fbb-3dd2d64f56d5 
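The repeated "Acquiring lock" / "acquired ... waited" / "released ... held" triplets around "compute_resources" and the "refresh_cache-<uuid>" names in the entries above come from oslo.concurrency's lockutils helpers, which log at DEBUG whenever a named semaphore is requested, obtained, and dropped. A minimal illustrative sketch of both forms (the lock name and the guarded calls are placeholders, not Nova's actual code path):

    from oslo_concurrency import lockutils

    # Context-manager form: produces the plain "Acquiring lock" /
    # "Acquired lock" / "Releasing lock" lines (the refresh_cache-* entries).
    def update_usage_sketch(tracker):
        with lockutils.lock("compute_resources"):
            tracker.update_usage()  # placeholder critical section

    # Decorator form: produces the timed variants, "acquired ... waited N.NNNs"
    # and '"released" ... held N.NNNs', attributed to the wrapped function.
    @lockutils.synchronized("compute_resources")
    def update_usage_decorated(tracker):
        tracker.update_usage()  # placeholder critical section

The "waited" figure is how long the caller blocked before entering the critical section; "held" is how long it stayed inside.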
[ 991.606424] env[69328]: DEBUG nova.network.neutron [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Updating instance_info_cache with network_info: [{"id": "3b413041-b9e3-47e2-a4f8-f828e31f079a", "address": "fa:16:3e:ce:7b:87", "network": {"id": "4e070871-25f8-4279-bd7d-24dcf5a39f03", "bridge": "br-int", "label": "tempest-ServersTestJSON-1512383675-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38528276c7744d798af4057d29c88ddb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b413041-b9", "ovs_interfaceid": "3b413041-b9e3-47e2-a4f8-f828e31f079a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 991.633621] env[69328]: DEBUG oslo_vmware.api [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273754, 'name': PowerOnVM_Task, 'duration_secs': 0.53447} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.634967] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 991.635361] env[69328]: INFO nova.compute.manager [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Took 14.63 seconds to spawn the instance on the hypervisor. 
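The Rename_Task and PowerOnVM_Task entries above ("Waiting for the task", "progress is N%", "completed successfully") show oslo.vmware's standard invoke-and-poll cycle: a vCenter task is fired over SOAP, then the caller blocks in wait_for_task(), which re-reads task state until success or error. A rough illustrative sketch, assuming a plain oslo.vmware session (the vCenter address, credentials, and VM reference are placeholders, not the environment in this log):

    from oslo_vmware import api

    def make_session():
        # Placeholder connection details.
        return api.VMwareAPISession(
            "vcenter.example.org", "administrator@vsphere.local", "secret",
            10,    # API retry count
            0.5)   # task poll interval, in seconds

    def power_on(session, vm_ref):
        # invoke_api() issues the PowerOnVM_Task SOAP call and returns a task
        # reference; wait_for_task() then polls it, logging the "progress is
        # N%" lines seen above, and raises if the task ends in error.
        task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
        return session.wait_for_task(task)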
[ 991.635714] env[69328]: DEBUG nova.compute.manager [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 991.637194] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e8008eb-8133-4aba-af5e-bfce2a902d0a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.641742] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-97563d6f-f1dd-4a10-8a2a-3d9aabc95961 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 991.641982] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-97563d6f-f1dd-4a10-8a2a-3d9aabc95961 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 991.642655] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-97563d6f-f1dd-4a10-8a2a-3d9aabc95961 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Deleting the datastore file [datastore1] b61436f5-0e8b-4da5-9459-cf9487dfd23f {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 991.642879] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5ce71d4e-d1b4-430d-9301-c6eb9d4b50c5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.667262] env[69328]: DEBUG oslo_vmware.api [None req-97563d6f-f1dd-4a10-8a2a-3d9aabc95961 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 991.667262] env[69328]: value = "task-3273756" [ 991.667262] env[69328]: _type = "Task" [ 991.667262] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.677553] env[69328]: DEBUG oslo_vmware.api [None req-97563d6f-f1dd-4a10-8a2a-3d9aabc95961 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273756, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.733258] env[69328]: DEBUG oslo_concurrency.lockutils [None req-181903c3-ccac-4553-aa0a-ffb84d6ca8e7 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Releasing lock "refresh_cache-f1be93b2-08db-41fe-87c4-f4e5f964cfa4" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 991.893480] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "96f604a9-e42c-4aa8-b5b5-edcb34901d94-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 991.893764] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "96f604a9-e42c-4aa8-b5b5-edcb34901d94-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 991.893944] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "96f604a9-e42c-4aa8-b5b5-edcb34901d94-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 992.084211] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9fff04c7-c0f7-43d9-9f9e-4f1b4cddc38c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "4d320c76-45bb-451c-8fbb-3dd2d64f56d5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.509s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 992.110819] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Releasing lock "refresh_cache-65fccb3f-5e0e-4140-be0a-5ba20f494d50" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 992.111158] env[69328]: DEBUG nova.compute.manager [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Instance network_info: |[{"id": "3b413041-b9e3-47e2-a4f8-f828e31f079a", "address": "fa:16:3e:ce:7b:87", "network": {"id": "4e070871-25f8-4279-bd7d-24dcf5a39f03", "bridge": "br-int", "label": "tempest-ServersTestJSON-1512383675-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, 
"tenant_id": "38528276c7744d798af4057d29c88ddb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b413041-b9", "ovs_interfaceid": "3b413041-b9e3-47e2-a4f8-f828e31f079a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 992.111647] env[69328]: DEBUG oslo_concurrency.lockutils [req-17c1bcd5-6000-452e-b2e0-ba37ba971115 req-cb622e2e-59c7-4a49-ae9b-6090ac21c142 service nova] Acquired lock "refresh_cache-65fccb3f-5e0e-4140-be0a-5ba20f494d50" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 992.111904] env[69328]: DEBUG nova.network.neutron [req-17c1bcd5-6000-452e-b2e0-ba37ba971115 req-cb622e2e-59c7-4a49-ae9b-6090ac21c142 service nova] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Refreshing network info cache for port 3b413041-b9e3-47e2-a4f8-f828e31f079a {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 992.178714] env[69328]: INFO nova.compute.manager [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Took 31.04 seconds to build instance. [ 992.184583] env[69328]: DEBUG oslo_vmware.api [None req-97563d6f-f1dd-4a10-8a2a-3d9aabc95961 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273756, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.280976} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.187357] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-97563d6f-f1dd-4a10-8a2a-3d9aabc95961 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 992.187499] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-97563d6f-f1dd-4a10-8a2a-3d9aabc95961 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 992.187680] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-97563d6f-f1dd-4a10-8a2a-3d9aabc95961 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 992.187922] env[69328]: INFO nova.compute.manager [None req-97563d6f-f1dd-4a10-8a2a-3d9aabc95961 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Took 0.66 seconds to destroy the instance on the hypervisor. 
[ 992.188137] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-97563d6f-f1dd-4a10-8a2a-3d9aabc95961 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 992.188853] env[69328]: DEBUG nova.compute.manager [-] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 992.188940] env[69328]: DEBUG nova.network.neutron [-] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 992.237981] env[69328]: DEBUG nova.compute.manager [None req-181903c3-ccac-4553-aa0a-ffb84d6ca8e7 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 992.238791] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92c36ff3-764c-4e2b-89ae-98b7ef2ae828 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.395506] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7559f1ec-5c12-4a3a-9151-2fc35dddecf6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.409642] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a03365f-ad0d-4372-b9cb-ccfbe47bbf7d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.450905] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a63dd07b-79d1-4d08-941c-4c2592b09c86 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.462308] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79f779c2-2526-4ae3-ae4c-72e2d270b0cc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.478436] env[69328]: DEBUG nova.compute.provider_tree [None req-dfb65d93-8fb4-40e0-8193-169d3590a0ed tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 992.621527] env[69328]: DEBUG nova.compute.manager [req-e74147c6-698c-4ae9-ac1e-5528802ae346 req-5b4aeec8-15a2-451c-a821-761cf40f81a6 service nova] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Received event network-vif-deleted-3d319ee9-4b9f-43cd-b96e-f3b35e34ec76 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 992.621835] env[69328]: INFO nova.compute.manager [req-e74147c6-698c-4ae9-ac1e-5528802ae346 req-5b4aeec8-15a2-451c-a821-761cf40f81a6 service nova] [instance: 
b61436f5-0e8b-4da5-9459-cf9487dfd23f] Neutron deleted interface 3d319ee9-4b9f-43cd-b96e-f3b35e34ec76; detaching it from the instance and deleting it from the info cache [ 992.622105] env[69328]: DEBUG nova.network.neutron [req-e74147c6-698c-4ae9-ac1e-5528802ae346 req-5b4aeec8-15a2-451c-a821-761cf40f81a6 service nova] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.681748] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2624031b-7ee5-4078-b54b-bb564439001b tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "51a9c492-6f91-4186-b550-ef12284b8a84" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.555s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 992.909700] env[69328]: DEBUG nova.network.neutron [req-17c1bcd5-6000-452e-b2e0-ba37ba971115 req-cb622e2e-59c7-4a49-ae9b-6090ac21c142 service nova] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Updated VIF entry in instance network info cache for port 3b413041-b9e3-47e2-a4f8-f828e31f079a. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 992.910143] env[69328]: DEBUG nova.network.neutron [req-17c1bcd5-6000-452e-b2e0-ba37ba971115 req-cb622e2e-59c7-4a49-ae9b-6090ac21c142 service nova] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Updating instance_info_cache with network_info: [{"id": "3b413041-b9e3-47e2-a4f8-f828e31f079a", "address": "fa:16:3e:ce:7b:87", "network": {"id": "4e070871-25f8-4279-bd7d-24dcf5a39f03", "bridge": "br-int", "label": "tempest-ServersTestJSON-1512383675-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38528276c7744d798af4057d29c88ddb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b413041-b9", "ovs_interfaceid": "3b413041-b9e3-47e2-a4f8-f828e31f079a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.932474] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "refresh_cache-96f604a9-e42c-4aa8-b5b5-edcb34901d94" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 992.932678] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquired lock "refresh_cache-96f604a9-e42c-4aa8-b5b5-edcb34901d94" {{(pid=69328) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 992.932896] env[69328]: DEBUG nova.network.neutron [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 992.955380] env[69328]: DEBUG nova.network.neutron [-] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.982148] env[69328]: DEBUG nova.scheduler.client.report [None req-dfb65d93-8fb4-40e0-8193-169d3590a0ed tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 993.125423] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-992b6d24-14fb-422a-8298-e00e63f58e8f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.137427] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc02a9f5-03cd-4275-afed-b5843be1d737 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.182316] env[69328]: DEBUG nova.compute.manager [req-e74147c6-698c-4ae9-ac1e-5528802ae346 req-5b4aeec8-15a2-451c-a821-761cf40f81a6 service nova] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Detach interface failed, port_id=3d319ee9-4b9f-43cd-b96e-f3b35e34ec76, reason: Instance b61436f5-0e8b-4da5-9459-cf9487dfd23f could not be found. 
{{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 993.258355] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-745d4539-4e79-43e5-9235-1877b97fa79d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.266749] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-181903c3-ccac-4553-aa0a-ffb84d6ca8e7 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Doing hard reboot of VM {{(pid=69328) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 993.267066] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-a9bddf55-c009-4385-af98-172954d10aac {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.274731] env[69328]: DEBUG oslo_vmware.api [None req-181903c3-ccac-4553-aa0a-ffb84d6ca8e7 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 993.274731] env[69328]: value = "task-3273757" [ 993.274731] env[69328]: _type = "Task" [ 993.274731] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.284587] env[69328]: DEBUG oslo_vmware.api [None req-181903c3-ccac-4553-aa0a-ffb84d6ca8e7 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3273757, 'name': ResetVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.412987] env[69328]: DEBUG oslo_concurrency.lockutils [req-17c1bcd5-6000-452e-b2e0-ba37ba971115 req-cb622e2e-59c7-4a49-ae9b-6090ac21c142 service nova] Releasing lock "refresh_cache-65fccb3f-5e0e-4140-be0a-5ba20f494d50" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 993.458148] env[69328]: INFO nova.compute.manager [-] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Took 1.27 seconds to deallocate network for instance. 
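The "Waiting for function ... _deallocate_network_with_retries to return" entry a few lines above is oslo.service's looping-call machinery, which the compute manager uses to retry network deallocation until it succeeds; the "Took 1.27 seconds to deallocate network" line marks the successful exit. A generic sketch of that retry pattern, assuming a placeholder deallocation callable rather than Nova's real helper:

    from oslo_service import loopingcall

    def deallocate_with_retries(do_deallocate, interval=2.0):
        # Each tick calls the placeholder do_deallocate(); raising
        # LoopingCallDone stops the loop, while any other exception is
        # logged and the attempt fires again after `interval` seconds
        # because stop_on_exception is False.
        def _attempt():
            do_deallocate()
            raise loopingcall.LoopingCallDone()

        timer = loopingcall.FixedIntervalLoopingCall(_attempt)
        return timer.start(interval=interval, stop_on_exception=False).wait()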
[ 993.487047] env[69328]: DEBUG oslo_concurrency.lockutils [None req-dfb65d93-8fb4-40e0-8193-169d3590a0ed tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.942s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 993.489427] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.194s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 993.490948] env[69328]: INFO nova.compute.claims [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 993.516154] env[69328]: INFO nova.scheduler.client.report [None req-dfb65d93-8fb4-40e0-8193-169d3590a0ed tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Deleted allocations for instance 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea [ 993.570027] env[69328]: DEBUG oslo_concurrency.lockutils [None req-39496824-d7d3-42e2-a8f1-da97e3053f4e tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "51a9c492-6f91-4186-b550-ef12284b8a84" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 993.570027] env[69328]: DEBUG oslo_concurrency.lockutils [None req-39496824-d7d3-42e2-a8f1-da97e3053f4e tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "51a9c492-6f91-4186-b550-ef12284b8a84" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 993.570027] env[69328]: DEBUG oslo_concurrency.lockutils [None req-39496824-d7d3-42e2-a8f1-da97e3053f4e tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "51a9c492-6f91-4186-b550-ef12284b8a84-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 993.570027] env[69328]: DEBUG oslo_concurrency.lockutils [None req-39496824-d7d3-42e2-a8f1-da97e3053f4e tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "51a9c492-6f91-4186-b550-ef12284b8a84-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 993.570027] env[69328]: DEBUG oslo_concurrency.lockutils [None req-39496824-d7d3-42e2-a8f1-da97e3053f4e tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "51a9c492-6f91-4186-b550-ef12284b8a84-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 993.572789] env[69328]: INFO nova.compute.manager [None req-39496824-d7d3-42e2-a8f1-da97e3053f4e tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Terminating instance [ 993.746896] env[69328]: DEBUG nova.network.neutron [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Updating instance_info_cache with network_info: [{"id": "d97e62a9-59f8-4f3b-9296-f5a0803d2b10", "address": "fa:16:3e:81:01:fa", "network": {"id": "e2ab6957-2c9d-4b95-91bd-5a62d9a284ba", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-967470666-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75d5853e3c724d02bacfa75173e38ab3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd97e62a9-59", "ovs_interfaceid": "d97e62a9-59f8-4f3b-9296-f5a0803d2b10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.786356] env[69328]: DEBUG oslo_vmware.api [None req-181903c3-ccac-4553-aa0a-ffb84d6ca8e7 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3273757, 'name': ResetVM_Task, 'duration_secs': 0.102488} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.786668] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-181903c3-ccac-4553-aa0a-ffb84d6ca8e7 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Did hard reboot of VM {{(pid=69328) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 993.786855] env[69328]: DEBUG nova.compute.manager [None req-181903c3-ccac-4553-aa0a-ffb84d6ca8e7 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 993.787769] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-539c4cb9-fdbe-4302-a16e-331908968298 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.965509] env[69328]: DEBUG oslo_concurrency.lockutils [None req-97563d6f-f1dd-4a10-8a2a-3d9aabc95961 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 994.024610] env[69328]: DEBUG oslo_concurrency.lockutils [None req-dfb65d93-8fb4-40e0-8193-169d3590a0ed tempest-InstanceActionsNegativeTestJSON-299004850 tempest-InstanceActionsNegativeTestJSON-299004850-project-member] Lock "7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.033s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 994.078029] env[69328]: DEBUG nova.compute.manager [None req-39496824-d7d3-42e2-a8f1-da97e3053f4e tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 994.078241] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-39496824-d7d3-42e2-a8f1-da97e3053f4e tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 994.079313] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d49cb93b-6ec5-47b1-a6a0-dbcc8b255647 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.091923] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-39496824-d7d3-42e2-a8f1-da97e3053f4e tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 994.092336] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b1d1a04d-6c22-4d0f-b56c-490843d2086c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.100355] env[69328]: DEBUG oslo_vmware.api [None req-39496824-d7d3-42e2-a8f1-da97e3053f4e tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 994.100355] env[69328]: value = "task-3273758" [ 994.100355] env[69328]: _type = "Task" [ 994.100355] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.113382] env[69328]: DEBUG oslo_vmware.api [None req-39496824-d7d3-42e2-a8f1-da97e3053f4e tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273758, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.213083] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7d02ecea-29c6-42cc-978d-a1b74cecc92b tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "bc9c3a41-7264-4d69-bc15-397b5fa0a8ad" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 994.213578] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7d02ecea-29c6-42cc-978d-a1b74cecc92b tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "bc9c3a41-7264-4d69-bc15-397b5fa0a8ad" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 994.248832] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Releasing lock "refresh_cache-96f604a9-e42c-4aa8-b5b5-edcb34901d94" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 994.301765] env[69328]: DEBUG oslo_concurrency.lockutils [None req-181903c3-ccac-4553-aa0a-ffb84d6ca8e7 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "f1be93b2-08db-41fe-87c4-f4e5f964cfa4" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.790s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 994.616411] env[69328]: DEBUG oslo_vmware.api [None req-39496824-d7d3-42e2-a8f1-da97e3053f4e tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273758, 'name': PowerOffVM_Task, 'duration_secs': 0.321227} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.616602] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-39496824-d7d3-42e2-a8f1-da97e3053f4e tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 994.616824] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-39496824-d7d3-42e2-a8f1-da97e3053f4e tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 994.617428] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cf964cd0-e120-45b6-b538-5f60db398687 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.710389] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-39496824-d7d3-42e2-a8f1-da97e3053f4e tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 994.710636] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-39496824-d7d3-42e2-a8f1-da97e3053f4e tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 994.710927] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-39496824-d7d3-42e2-a8f1-da97e3053f4e tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Deleting the datastore file [datastore2] 51a9c492-6f91-4186-b550-ef12284b8a84 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 994.711125] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-33ad09d7-cfa8-4413-99a5-0807f4db66d0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.717653] env[69328]: INFO nova.compute.manager [None req-7d02ecea-29c6-42cc-978d-a1b74cecc92b tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Detaching volume 99338b52-3801-47a7-ab57-21495a480b27 [ 994.723826] env[69328]: DEBUG oslo_vmware.api [None req-39496824-d7d3-42e2-a8f1-da97e3053f4e tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 994.723826] env[69328]: value = "task-3273760" [ 994.723826] env[69328]: _type = "Task" [ 994.723826] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.740244] env[69328]: DEBUG oslo_vmware.api [None req-39496824-d7d3-42e2-a8f1-da97e3053f4e tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273760, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.762847] env[69328]: INFO nova.virt.block_device [None req-7d02ecea-29c6-42cc-978d-a1b74cecc92b tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Attempting to driver detach volume 99338b52-3801-47a7-ab57-21495a480b27 from mountpoint /dev/sdb [ 994.763169] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d02ecea-29c6-42cc-978d-a1b74cecc92b tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Volume detach. Driver type: vmdk {{(pid=69328) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 994.763373] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d02ecea-29c6-42cc-978d-a1b74cecc92b tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653755', 'volume_id': '99338b52-3801-47a7-ab57-21495a480b27', 'name': 'volume-99338b52-3801-47a7-ab57-21495a480b27', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'bc9c3a41-7264-4d69-bc15-397b5fa0a8ad', 'attached_at': '', 'detached_at': '', 'volume_id': '99338b52-3801-47a7-ab57-21495a480b27', 'serial': '99338b52-3801-47a7-ab57-21495a480b27'} {{(pid=69328) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 994.764365] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d1c7044-8884-4f8f-a6a0-f06e9e41401e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.796270] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e977a96-be97-4e83-9439-23e440b4ded8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.799378] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cb6a614-78c1-4166-aeff-cc1d0ccf96f1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.823935] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61c3e68a-bbbf-416f-b7f9-7d3d6befba45 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.830110] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd320977-afee-4893-8540-16f2604df951 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.839734] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Updating instance '96f604a9-e42c-4aa8-b5b5-edcb34901d94' progress to 83 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 994.866432] env[69328]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d0eff45-3437-44e6-84e8-66c977d20dc5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.884501] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d02ecea-29c6-42cc-978d-a1b74cecc92b tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] The volume has not been displaced from its original location: [datastore1] volume-99338b52-3801-47a7-ab57-21495a480b27/volume-99338b52-3801-47a7-ab57-21495a480b27.vmdk. No consolidation needed. {{(pid=69328) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 994.890230] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d02ecea-29c6-42cc-978d-a1b74cecc92b tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Reconfiguring VM instance instance-00000018 to detach disk 2001 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 994.894211] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-06689512-1cc6-4dd2-953a-27ea0b553372 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.914461] env[69328]: DEBUG oslo_vmware.api [None req-7d02ecea-29c6-42cc-978d-a1b74cecc92b tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 994.914461] env[69328]: value = "task-3273761" [ 994.914461] env[69328]: _type = "Task" [ 994.914461] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.926368] env[69328]: DEBUG oslo_vmware.api [None req-7d02ecea-29c6-42cc-978d-a1b74cecc92b tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273761, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.965541] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fbb03e9-477f-47e0-8aa1-2c6cde61dc8c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.976931] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f09875be-f279-43dd-a359-30ac5d48b8d8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.010470] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d302ccc5-084f-47c1-874b-e5b6bd46dcba {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.020201] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24c5a0a2-84b4-4f45-9ad8-8f6fc9da8fe0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.037635] env[69328]: DEBUG nova.compute.provider_tree [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 995.238746] env[69328]: DEBUG oslo_vmware.api [None req-39496824-d7d3-42e2-a8f1-da97e3053f4e tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273760, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.235788} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.238746] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-39496824-d7d3-42e2-a8f1-da97e3053f4e tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 995.238907] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-39496824-d7d3-42e2-a8f1-da97e3053f4e tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 995.239121] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-39496824-d7d3-42e2-a8f1-da97e3053f4e tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 995.239386] env[69328]: INFO nova.compute.manager [None req-39496824-d7d3-42e2-a8f1-da97e3053f4e tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Took 1.16 seconds to destroy the instance on the hypervisor. 
[ 995.239723] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-39496824-d7d3-42e2-a8f1-da97e3053f4e tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 995.239941] env[69328]: DEBUG nova.compute.manager [-] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 995.240051] env[69328]: DEBUG nova.network.neutron [-] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 995.365480] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 995.365806] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5976d2c0-a030-4e82-aede-de5f64b19f41 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.374670] env[69328]: DEBUG oslo_vmware.api [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 995.374670] env[69328]: value = "task-3273762" [ 995.374670] env[69328]: _type = "Task" [ 995.374670] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.384307] env[69328]: DEBUG oslo_vmware.api [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273762, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.426468] env[69328]: DEBUG oslo_vmware.api [None req-7d02ecea-29c6-42cc-978d-a1b74cecc92b tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273761, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.541595] env[69328]: DEBUG nova.scheduler.client.report [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 995.591551] env[69328]: DEBUG nova.compute.manager [req-5a23509c-d62f-451c-83c5-0cb2194bec78 req-ece59590-5fdd-47b9-8f7e-64a3d41675aa service nova] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Received event network-vif-deleted-f7762174-4741-45a8-8a0e-8c4624ad29f6 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 995.591617] env[69328]: INFO nova.compute.manager [req-5a23509c-d62f-451c-83c5-0cb2194bec78 req-ece59590-5fdd-47b9-8f7e-64a3d41675aa service nova] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Neutron deleted interface f7762174-4741-45a8-8a0e-8c4624ad29f6; detaching it from the instance and deleting it from the info cache [ 995.591888] env[69328]: DEBUG nova.network.neutron [req-5a23509c-d62f-451c-83c5-0cb2194bec78 req-ece59590-5fdd-47b9-8f7e-64a3d41675aa service nova] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 995.788924] env[69328]: DEBUG nova.virt.hardware [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 995.789351] env[69328]: DEBUG nova.virt.hardware [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 995.789351] env[69328]: DEBUG nova.virt.hardware [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 
995.789539] env[69328]: DEBUG nova.virt.hardware [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 995.789757] env[69328]: DEBUG nova.virt.hardware [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 995.790143] env[69328]: DEBUG nova.virt.hardware [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 995.790377] env[69328]: DEBUG nova.virt.hardware [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 995.790604] env[69328]: DEBUG nova.virt.hardware [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 995.790934] env[69328]: DEBUG nova.virt.hardware [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 995.791885] env[69328]: DEBUG nova.virt.hardware [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 995.791972] env[69328]: DEBUG nova.virt.hardware [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 995.797031] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-035d262c-5d5a-4f79-9c51-639861de9a22 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.807172] env[69328]: DEBUG nova.virt.hardware [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 995.807389] env[69328]: DEBUG nova.virt.hardware [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 995.807543] env[69328]: DEBUG nova.virt.hardware [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 995.807723] env[69328]: DEBUG nova.virt.hardware [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 995.807871] env[69328]: DEBUG nova.virt.hardware [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 995.808026] env[69328]: DEBUG nova.virt.hardware [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 995.808238] env[69328]: DEBUG nova.virt.hardware [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 995.808438] env[69328]: DEBUG nova.virt.hardware [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 995.808593] env[69328]: DEBUG nova.virt.hardware [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 995.808833] env[69328]: DEBUG nova.virt.hardware [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 995.809039] env[69328]: DEBUG nova.virt.hardware [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 
tempest-ServersTestJSON-1775992188-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 995.810364] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06c39c24-33e1-42e2-9a76-51c85691bc61 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.816031] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f43a8ba9-be03-4b60-965d-d9ee2bf4e3dd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.834960] env[69328]: DEBUG oslo_vmware.rw_handles [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ee239d-5169-dbcf-9db8-a7d1898b740d/disk-0.vmdk. {{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 995.836318] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3d69c4b-8c60-4fdc-9d13-0c55614f7f3e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.840627] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f6:20:64', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '62237242-7ce2-4664-a1c5-6783b516b507', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b7d14440-051d-478f-8bda-be652bd1f72f', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 995.848557] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 995.849396] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5de7a2f6-aa54-4664-bbce-154d7245a17d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.852128] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 995.852740] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6143edc3-937d-4e00-bc3b-ec1d0f707c60 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.876622] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ce:7b:87', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '61b8f0db-488e-42d7-bf6c-6c1665cd5616', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3b413041-b9e3-47e2-a4f8-f828e31f079a', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 995.884270] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Creating folder: Project (38528276c7744d798af4057d29c88ddb). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 995.889727] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c0cf2eef-7ae8-436e-a8a4-056de175e96d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.891468] env[69328]: DEBUG oslo_vmware.rw_handles [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ee239d-5169-dbcf-9db8-a7d1898b740d/disk-0.vmdk is in state: ready. {{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 995.891653] env[69328]: ERROR oslo_vmware.rw_handles [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ee239d-5169-dbcf-9db8-a7d1898b740d/disk-0.vmdk due to incomplete transfer. [ 995.894157] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-827997d6-735e-4194-86ae-ea96e611f366 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.897985] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 995.897985] env[69328]: value = "task-3273763" [ 995.897985] env[69328]: _type = "Task" [ 995.897985] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.905313] env[69328]: DEBUG oslo_vmware.api [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273762, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.906929] env[69328]: DEBUG oslo_vmware.rw_handles [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52ee239d-5169-dbcf-9db8-a7d1898b740d/disk-0.vmdk. {{(pid=69328) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 995.907166] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Uploaded image 83b1e553-81a0-4dcf-a9f7-df6e5e0289ab to the Glance image server {{(pid=69328) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 995.909591] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Destroying the VM {{(pid=69328) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 995.913981] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-f4e3c975-a883-4cbb-8dec-f495696ae84d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.915623] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273763, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.915931] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Created folder: Project (38528276c7744d798af4057d29c88ddb) in parent group-v653649. [ 995.916097] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Creating folder: Instances. Parent ref: group-v653893. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 995.916760] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1c265e62-afb9-4967-b110-7ec5a9d1a46f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.923774] env[69328]: DEBUG oslo_vmware.api [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 995.923774] env[69328]: value = "task-3273765" [ 995.923774] env[69328]: _type = "Task" [ 995.923774] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.930095] env[69328]: DEBUG oslo_vmware.api [None req-7d02ecea-29c6-42cc-978d-a1b74cecc92b tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273761, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.934974] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Created folder: Instances in parent group-v653893. [ 995.935255] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 995.938292] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 995.938547] env[69328]: DEBUG oslo_vmware.api [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273765, 'name': Destroy_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.938757] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f4de238a-a73b-41a6-b2e0-41de9eb59db2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.960931] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 995.960931] env[69328]: value = "task-3273767" [ 995.960931] env[69328]: _type = "Task" [ 995.960931] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.971066] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273767, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.995629] env[69328]: DEBUG nova.network.neutron [-] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.051173] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.559s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 996.051173] env[69328]: DEBUG nova.compute.manager [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 996.052982] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 13.177s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 996.053459] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 996.054072] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69328) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 996.054551] env[69328]: DEBUG oslo_concurrency.lockutils [None req-65bf4397-837d-43f7-abf5-96e2a73bb4ae tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.549s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 996.054915] env[69328]: DEBUG nova.objects.instance [None req-65bf4397-837d-43f7-abf5-96e2a73bb4ae tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Lazy-loading 'resources' on Instance uuid 772ab9b3-23ac-46c6-acb1-af0b2726fd90 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 996.057492] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3081438b-d7ea-4abb-a77f-9e45642c7f3c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.069296] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ad9cb66-894b-494d-97a2-228bbd2d7781 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.092180] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d29b5627-87be-4cac-8f01-3ac31cad016d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.095518] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4a84a762-ddd5-47ec-bf7a-f7f2bf4a03ec {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.107698] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37ed9332-a0f8-4b15-ad58-afb2cfcb9192 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.114268] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23ef91f8-b4e3-4b01-9c78-dc68324d7820 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.160793] env[69328]: DEBUG nova.compute.resource_tracker 
[None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178754MB free_disk=115GB free_vcpus=48 pci_devices=None {{(pid=69328) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 996.160961] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 996.176942] env[69328]: DEBUG nova.compute.manager [req-5a23509c-d62f-451c-83c5-0cb2194bec78 req-ece59590-5fdd-47b9-8f7e-64a3d41675aa service nova] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Detach interface failed, port_id=f7762174-4741-45a8-8a0e-8c4624ad29f6, reason: Instance 51a9c492-6f91-4186-b550-ef12284b8a84 could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 996.391945] env[69328]: DEBUG oslo_vmware.api [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273762, 'name': PowerOnVM_Task, 'duration_secs': 0.624921} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.392055] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 996.392273] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2a6bff58-6914-4759-8b36-351d7dd3ee85 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Updating instance '96f604a9-e42c-4aa8-b5b5-edcb34901d94' progress to 100 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 996.409247] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273763, 'name': CreateVM_Task} progress is 25%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.427792] env[69328]: DEBUG oslo_vmware.api [None req-7d02ecea-29c6-42cc-978d-a1b74cecc92b tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273761, 'name': ReconfigVM_Task, 'duration_secs': 1.355957} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.431903] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d02ecea-29c6-42cc-978d-a1b74cecc92b tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Reconfigured VM instance instance-00000018 to detach disk 2001 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 996.436926] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ce107b2c-c5d3-4409-9848-d0cc69454a6d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.456617] env[69328]: DEBUG oslo_vmware.api [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273765, 'name': Destroy_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.459037] env[69328]: DEBUG oslo_vmware.api [None req-7d02ecea-29c6-42cc-978d-a1b74cecc92b tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 996.459037] env[69328]: value = "task-3273768" [ 996.459037] env[69328]: _type = "Task" [ 996.459037] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.476526] env[69328]: DEBUG oslo_vmware.api [None req-7d02ecea-29c6-42cc-978d-a1b74cecc92b tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273768, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.482374] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273767, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.490760] env[69328]: DEBUG oslo_vmware.rw_handles [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52842b21-fb0c-9b7e-108a-2c49e2ad5b0b/disk-0.vmdk. {{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 996.492115] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f1532ae-bd2a-4f7f-a2af-79458b6aeed5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.499943] env[69328]: INFO nova.compute.manager [-] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Took 1.26 seconds to deallocate network for instance. [ 996.500474] env[69328]: DEBUG oslo_vmware.rw_handles [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52842b21-fb0c-9b7e-108a-2c49e2ad5b0b/disk-0.vmdk is in state: ready. 
{{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 996.500627] env[69328]: ERROR oslo_vmware.rw_handles [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52842b21-fb0c-9b7e-108a-2c49e2ad5b0b/disk-0.vmdk due to incomplete transfer. [ 996.502398] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-16fd0300-c900-4539-9591-88da8b4ab9d8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.515579] env[69328]: DEBUG oslo_vmware.rw_handles [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52842b21-fb0c-9b7e-108a-2c49e2ad5b0b/disk-0.vmdk. {{(pid=69328) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 996.515811] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Uploaded image 8868d8b6-e8a6-4c40-9bca-fb6ec2c24443 to the Glance image server {{(pid=69328) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 996.518253] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Destroying the VM {{(pid=69328) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 996.519083] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-822da918-264d-4591-8ea5-5ebfa4755269 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.528744] env[69328]: DEBUG oslo_vmware.api [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for the task: (returnval){ [ 996.528744] env[69328]: value = "task-3273769" [ 996.528744] env[69328]: _type = "Task" [ 996.528744] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.539143] env[69328]: DEBUG oslo_vmware.api [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273769, 'name': Destroy_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.558307] env[69328]: DEBUG nova.compute.utils [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 996.563844] env[69328]: DEBUG nova.compute.manager [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 996.564073] env[69328]: DEBUG nova.network.neutron [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 996.641840] env[69328]: DEBUG nova.policy [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a8fbe2a134194d29af48ac8e4986d0cb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd86de4d5055642aa86a29c6768e3db46', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 996.844306] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "dc050589-e37a-4798-9532-df4ecfab7eb1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 996.844306] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "dc050589-e37a-4798-9532-df4ecfab7eb1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 996.912975] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273763, 'name': CreateVM_Task, 'duration_secs': 0.721314} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.913754] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 996.914594] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.914799] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 996.915182] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 996.915725] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-876deb6b-8d68-4d1c-ad62-402af6c8c772 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.923330] env[69328]: DEBUG oslo_vmware.api [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 996.923330] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]528ea198-34e4-6d96-d4b5-15713197e5d2" [ 996.923330] env[69328]: _type = "Task" [ 996.923330] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.935143] env[69328]: DEBUG oslo_vmware.api [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]528ea198-34e4-6d96-d4b5-15713197e5d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.943110] env[69328]: DEBUG oslo_vmware.api [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273765, 'name': Destroy_Task, 'duration_secs': 0.836993} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.943383] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Destroyed the VM [ 996.943897] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Deleting Snapshot of the VM instance {{(pid=69328) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 996.943897] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-681979f6-add1-45c9-b8a2-4d11d834045b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.950744] env[69328]: DEBUG oslo_vmware.api [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 996.950744] env[69328]: value = "task-3273770" [ 996.950744] env[69328]: _type = "Task" [ 996.950744] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.959183] env[69328]: DEBUG oslo_vmware.api [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273770, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.962897] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05bfe4d8-4b8a-4409-b038-4a63113544c0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.982995] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c088f76b-9814-4db6-88b5-73b67bcf30a5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.986228] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273767, 'name': CreateVM_Task, 'duration_secs': 0.594644} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.986560] env[69328]: DEBUG oslo_vmware.api [None req-7d02ecea-29c6-42cc-978d-a1b74cecc92b tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273768, 'name': ReconfigVM_Task, 'duration_secs': 0.216942} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.987063] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 996.987375] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d02ecea-29c6-42cc-978d-a1b74cecc92b tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653755', 'volume_id': '99338b52-3801-47a7-ab57-21495a480b27', 'name': 'volume-99338b52-3801-47a7-ab57-21495a480b27', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'bc9c3a41-7264-4d69-bc15-397b5fa0a8ad', 'attached_at': '', 'detached_at': '', 'volume_id': '99338b52-3801-47a7-ab57-21495a480b27', 'serial': '99338b52-3801-47a7-ab57-21495a480b27'} {{(pid=69328) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 996.990352] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.024661] env[69328]: DEBUG oslo_concurrency.lockutils [None req-39496824-d7d3-42e2-a8f1-da97e3053f4e tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 997.025793] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa1cb493-e770-4fb5-b0b8-a8a77788e1dc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.035693] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4db807e-3ade-40d4-9d7e-46aaa8334f4c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.043126] env[69328]: DEBUG oslo_vmware.api [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273769, 'name': Destroy_Task, 'duration_secs': 0.404473} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.043770] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Destroyed the VM [ 997.044023] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Deleting Snapshot of the VM instance {{(pid=69328) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 997.044289] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-ec2ae50a-6fbd-4d7c-990a-d83b80db360c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.054985] env[69328]: DEBUG nova.compute.provider_tree [None req-65bf4397-837d-43f7-abf5-96e2a73bb4ae tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 997.063563] env[69328]: DEBUG oslo_vmware.api [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for the task: (returnval){ [ 997.063563] env[69328]: value = "task-3273771" [ 997.063563] env[69328]: _type = "Task" [ 997.063563] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.066180] env[69328]: DEBUG nova.compute.manager [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 997.075443] env[69328]: DEBUG oslo_vmware.api [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273771, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.116027] env[69328]: DEBUG nova.network.neutron [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Successfully created port: cbad07b8-acca-4410-abd7-78b9b5a05849 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 997.348215] env[69328]: DEBUG nova.compute.manager [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Starting instance... 
{{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 997.438246] env[69328]: DEBUG oslo_vmware.api [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]528ea198-34e4-6d96-d4b5-15713197e5d2, 'name': SearchDatastore_Task, 'duration_secs': 0.020442} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.438246] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 997.438246] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 997.438668] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.438812] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 997.439064] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 997.439385] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 997.439691] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 997.440323] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae15783a-8718-43d2-8c21-a727dac2d156 {{(pid=69328) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.445476] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-645fd3ed-7c6c-42b1-9a17-ef7f20f49ca4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.451402] env[69328]: DEBUG oslo_vmware.api [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Waiting for the task: (returnval){ [ 997.451402] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5207aa90-32c7-1ead-1e7f-abbb7d5739a1" [ 997.451402] env[69328]: _type = "Task" [ 997.451402] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.461016] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 997.461264] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 997.462630] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0833fa89-160e-43cd-9141-5fa32a8ae740 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.475337] env[69328]: DEBUG oslo_vmware.api [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5207aa90-32c7-1ead-1e7f-abbb7d5739a1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.475532] env[69328]: DEBUG oslo_vmware.api [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273770, 'name': RemoveSnapshot_Task, 'duration_secs': 0.455195} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.476241] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Deleted Snapshot of the VM instance {{(pid=69328) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 997.476492] env[69328]: DEBUG nova.compute.manager [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 997.477313] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c46e857-1381-4234-ad92-a872a49b155e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.481434] env[69328]: DEBUG oslo_vmware.api [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 997.481434] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]529585c7-e8f9-b979-52f2-f3d62063d063" [ 997.481434] env[69328]: _type = "Task" [ 997.481434] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.495504] env[69328]: DEBUG oslo_vmware.api [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]529585c7-e8f9-b979-52f2-f3d62063d063, 'name': SearchDatastore_Task, 'duration_secs': 0.01116} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.496815] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dfda13d0-c36b-4c89-abd9-12102d13182a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.503718] env[69328]: DEBUG oslo_vmware.api [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 997.503718] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]520f6e69-6209-ca8f-f578-570878f7235b" [ 997.503718] env[69328]: _type = "Task" [ 997.503718] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.513842] env[69328]: DEBUG oslo_vmware.api [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]520f6e69-6209-ca8f-f578-570878f7235b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.548654] env[69328]: DEBUG nova.objects.instance [None req-7d02ecea-29c6-42cc-978d-a1b74cecc92b tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lazy-loading 'flavor' on Instance uuid bc9c3a41-7264-4d69-bc15-397b5fa0a8ad {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 997.558790] env[69328]: DEBUG nova.scheduler.client.report [None req-65bf4397-837d-43f7-abf5-96e2a73bb4ae tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 997.579298] env[69328]: DEBUG oslo_vmware.api [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273771, 'name': RemoveSnapshot_Task, 'duration_secs': 0.467316} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.580435] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Deleted Snapshot of the VM instance {{(pid=69328) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 997.580435] env[69328]: DEBUG nova.compute.manager [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 997.581487] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81df3b31-ac55-4367-9f76-f4effc1d73cf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.699704] env[69328]: DEBUG oslo_concurrency.lockutils [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Acquiring lock "dd43adb3-b073-483a-81dd-69df7f746874" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 997.699927] env[69328]: DEBUG oslo_concurrency.lockutils [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Lock "dd43adb3-b073-483a-81dd-69df7f746874" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} 
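The `Waiting for the task` / `Task: {...} progress is N%` / `completed successfully` entries above are produced by oslo.vmware's task-polling loop: the driver submits an asynchronous vCenter task (CreateVM_Task, ReconfigVM_Task, RemoveSnapshot_Task, ...) and then blocks in `session.wait_for_task()` until the task reaches a terminal state. The sketch below is illustrative only, not the Nova driver code; the vCenter host, credentials, and the choice of `PowerOffVM_Task` are placeholder assumptions.

```python
# Illustrative sketch of the oslo.vmware task-polling pattern seen in this log.
# Host, credentials, and the powered-off VM are placeholders, not real values.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vc.example.test',            # placeholder vCenter host
    'user', 'password',           # placeholder credentials
    api_retry_count=10,
    task_poll_interval=0.5)

# Look up a VM managed-object reference, then submit an asynchronous task
# against it. wait_for_task() emits the "Task: {...} progress is N%" DEBUG
# lines and raises VimFaultException if the task ends in an error state.
result = session.invoke_api(vim_util, 'get_objects', session.vim,
                            'VirtualMachine', 100)
vm_ref = result.objects[0].obj
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
session.wait_for_task(task)
```
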
[ 997.871999] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 997.929590] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Acquiring lock "6b9757de-a274-4f4d-9b73-cc2ca92b4732" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 997.929857] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Lock "6b9757de-a274-4f4d-9b73-cc2ca92b4732" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 997.967025] env[69328]: DEBUG oslo_vmware.api [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5207aa90-32c7-1ead-1e7f-abbb7d5739a1, 'name': SearchDatastore_Task, 'duration_secs': 0.021256} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.967025] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 997.967025] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 997.967025] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.999869] env[69328]: INFO nova.compute.manager [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Shelve offloading [ 998.015624] env[69328]: DEBUG oslo_vmware.api [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': 
session[527fc292-6741-f48d-313f-2d0c02ad0f69]520f6e69-6209-ca8f-f578-570878f7235b, 'name': SearchDatastore_Task, 'duration_secs': 0.014374} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.015893] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 998.016309] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 9f6f8e97-cb21-4984-af08-a63ea4578eef/9f6f8e97-cb21-4984-af08-a63ea4578eef.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 998.016447] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 998.016635] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 998.016842] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ed8208b2-a433-48b2-8168-f044d8142d13 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.018807] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-035e3dfd-3531-48fb-b5a4-3dfbc9e47f89 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.026751] env[69328]: DEBUG oslo_vmware.api [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 998.026751] env[69328]: value = "task-3273772" [ 998.026751] env[69328]: _type = "Task" [ 998.026751] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.031142] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 998.031349] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 998.032483] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-003916f0-68db-4b2a-a013-b0ddb0948f6a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.037888] env[69328]: DEBUG oslo_vmware.api [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273772, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.041238] env[69328]: DEBUG oslo_vmware.api [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Waiting for the task: (returnval){ [ 998.041238] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b8430c-76f4-6b77-92bb-d873e4184c95" [ 998.041238] env[69328]: _type = "Task" [ 998.041238] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.049045] env[69328]: DEBUG oslo_vmware.api [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b8430c-76f4-6b77-92bb-d873e4184c95, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.069801] env[69328]: DEBUG oslo_concurrency.lockutils [None req-65bf4397-837d-43f7-abf5-96e2a73bb4ae tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.015s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 998.074625] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 11.382s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 998.074930] env[69328]: DEBUG nova.objects.instance [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69328) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 998.081344] env[69328]: DEBUG nova.compute.manager [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 998.091406] env[69328]: INFO nova.scheduler.client.report [None req-65bf4397-837d-43f7-abf5-96e2a73bb4ae tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Deleted allocations for instance 772ab9b3-23ac-46c6-acb1-af0b2726fd90 [ 998.098410] env[69328]: INFO nova.compute.manager [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Shelve offloading [ 998.116026] env[69328]: DEBUG nova.virt.hardware [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 998.116395] env[69328]: DEBUG nova.virt.hardware [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Flavor limits 0:0:0 
{{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 998.116575] env[69328]: DEBUG nova.virt.hardware [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 998.116759] env[69328]: DEBUG nova.virt.hardware [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 998.116904] env[69328]: DEBUG nova.virt.hardware [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 998.117436] env[69328]: DEBUG nova.virt.hardware [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 998.117436] env[69328]: DEBUG nova.virt.hardware [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 998.117436] env[69328]: DEBUG nova.virt.hardware [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 998.117607] env[69328]: DEBUG nova.virt.hardware [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 998.117736] env[69328]: DEBUG nova.virt.hardware [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 998.117905] env[69328]: DEBUG nova.virt.hardware [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 998.120230] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94f2e456-9aef-4b4c-9662-fad71b412513 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.129462] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3f5ef26-1606-43e9-84a8-a760aaf7d028 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.202685] 
env[69328]: DEBUG nova.compute.manager [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 998.220277] env[69328]: DEBUG oslo_concurrency.lockutils [None req-dda2c9ea-ab23-4526-ba73-3282180787b9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "96f604a9-e42c-4aa8-b5b5-edcb34901d94" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 998.220562] env[69328]: DEBUG oslo_concurrency.lockutils [None req-dda2c9ea-ab23-4526-ba73-3282180787b9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "96f604a9-e42c-4aa8-b5b5-edcb34901d94" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 998.220835] env[69328]: DEBUG nova.compute.manager [None req-dda2c9ea-ab23-4526-ba73-3282180787b9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Going to confirm migration 4 {{(pid=69328) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 998.324241] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Acquiring lock "19f537b7-90fc-4832-b137-e042e00a508b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 998.324563] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Lock "19f537b7-90fc-4832-b137-e042e00a508b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 998.431747] env[69328]: DEBUG nova.compute.manager [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Starting instance... 
{{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 998.504393] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 998.505141] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a0e1577a-a720-4580-81d6-121d2c134005 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.513781] env[69328]: DEBUG oslo_vmware.api [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 998.513781] env[69328]: value = "task-3273773" [ 998.513781] env[69328]: _type = "Task" [ 998.513781] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.523461] env[69328]: DEBUG oslo_vmware.api [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273773, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.535804] env[69328]: DEBUG oslo_vmware.api [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273772, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.490659} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.536082] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 9f6f8e97-cb21-4984-af08-a63ea4578eef/9f6f8e97-cb21-4984-af08-a63ea4578eef.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 998.536311] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 998.536547] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5fa4e4f4-8672-4d70-a248-c0f2840940fc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.546964] env[69328]: DEBUG oslo_vmware.api [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 998.546964] env[69328]: value = "task-3273774" [ 998.546964] env[69328]: _type = "Task" [ 998.546964] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.558089] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7d02ecea-29c6-42cc-978d-a1b74cecc92b tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "bc9c3a41-7264-4d69-bc15-397b5fa0a8ad" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.345s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 998.560051] env[69328]: DEBUG oslo_vmware.api [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b8430c-76f4-6b77-92bb-d873e4184c95, 'name': SearchDatastore_Task, 'duration_secs': 0.009463} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.563046] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8774c0c0-c98d-481c-b56f-eeec75cda2fa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.566482] env[69328]: DEBUG oslo_vmware.api [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273774, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.570647] env[69328]: DEBUG oslo_vmware.api [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Waiting for the task: (returnval){ [ 998.570647] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5264cc41-0637-3f71-fed7-afcf19755018" [ 998.570647] env[69328]: _type = "Task" [ 998.570647] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.588497] env[69328]: DEBUG oslo_vmware.api [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5264cc41-0637-3f71-fed7-afcf19755018, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.600311] env[69328]: DEBUG oslo_concurrency.lockutils [None req-65bf4397-837d-43f7-abf5-96e2a73bb4ae tempest-ServerTagsTestJSON-2045182235 tempest-ServerTagsTestJSON-2045182235-project-member] Lock "772ab9b3-23ac-46c6-acb1-af0b2726fd90" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.680s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 998.602053] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 998.602575] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f1cd913e-27db-4dc1-a383-23d2e8778b86 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.610809] env[69328]: DEBUG oslo_vmware.api [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for the task: (returnval){ [ 998.610809] env[69328]: value = "task-3273775" [ 998.610809] env[69328]: _type = "Task" [ 998.610809] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.622178] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] VM already powered off {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 998.622534] env[69328]: DEBUG nova.compute.manager [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 998.623737] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7f1f69e-6bc5-4175-a293-3309c5f42f41 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.633798] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Acquiring lock "refresh_cache-a0952fdf-5570-4112-bc4d-e9f9cee1599c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.633882] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Acquired lock "refresh_cache-a0952fdf-5570-4112-bc4d-e9f9cee1599c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 998.634101] env[69328]: DEBUG nova.network.neutron [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 
tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 998.714466] env[69328]: DEBUG nova.compute.manager [req-ebbcaf72-d5a3-41d4-a2df-d15070df27cc req-7898050d-191a-4cf7-bf15-176fb8d9c913 service nova] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Received event network-vif-plugged-cbad07b8-acca-4410-abd7-78b9b5a05849 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 998.714749] env[69328]: DEBUG oslo_concurrency.lockutils [req-ebbcaf72-d5a3-41d4-a2df-d15070df27cc req-7898050d-191a-4cf7-bf15-176fb8d9c913 service nova] Acquiring lock "a7d4893f-31d4-449d-96d5-a2a1377d8454-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 998.714973] env[69328]: DEBUG oslo_concurrency.lockutils [req-ebbcaf72-d5a3-41d4-a2df-d15070df27cc req-7898050d-191a-4cf7-bf15-176fb8d9c913 service nova] Lock "a7d4893f-31d4-449d-96d5-a2a1377d8454-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 998.715138] env[69328]: DEBUG oslo_concurrency.lockutils [req-ebbcaf72-d5a3-41d4-a2df-d15070df27cc req-7898050d-191a-4cf7-bf15-176fb8d9c913 service nova] Lock "a7d4893f-31d4-449d-96d5-a2a1377d8454-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 998.715355] env[69328]: DEBUG nova.compute.manager [req-ebbcaf72-d5a3-41d4-a2df-d15070df27cc req-7898050d-191a-4cf7-bf15-176fb8d9c913 service nova] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] No waiting events found dispatching network-vif-plugged-cbad07b8-acca-4410-abd7-78b9b5a05849 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 998.715551] env[69328]: WARNING nova.compute.manager [req-ebbcaf72-d5a3-41d4-a2df-d15070df27cc req-7898050d-191a-4cf7-bf15-176fb8d9c913 service nova] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Received unexpected event network-vif-plugged-cbad07b8-acca-4410-abd7-78b9b5a05849 for instance with vm_state building and task_state spawning. 
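The `Acquiring lock ...` / `acquired ... waited 0.000s` / `"released" ... held N.NNNs` triplets throughout these entries come from oslo.concurrency's lock decorator, which wraps each compute-manager critical section (instance-event handling, resource-tracker updates, per-instance build locks). A minimal sketch of that pattern follows; the lock name, function, and UUID argument are placeholders for illustration, not Nova's actual code.

```python
# Illustrative sketch of the oslo_concurrency locking pattern that emits the
# "Acquiring lock / acquired / released" DEBUG lines in this log.
from oslo_concurrency import lockutils

# Nova-style decorator factory: all lock files share a common prefix.
synchronized = lockutils.synchronized_with_prefix('nova-')

@synchronized('compute_resources')
def update_usage(instance_uuid):
    # Runs only while the "compute_resources" lock is held; lockutils logs
    # how long the caller waited for the lock and how long it was held.
    return instance_uuid

if __name__ == '__main__':
    update_usage('bc9c3a41-7264-4d69-bc15-397b5fa0a8ad')  # example UUID
```
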
[ 998.732520] env[69328]: DEBUG oslo_concurrency.lockutils [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 998.760350] env[69328]: DEBUG oslo_concurrency.lockutils [None req-dda2c9ea-ab23-4526-ba73-3282180787b9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "refresh_cache-96f604a9-e42c-4aa8-b5b5-edcb34901d94" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.760540] env[69328]: DEBUG oslo_concurrency.lockutils [None req-dda2c9ea-ab23-4526-ba73-3282180787b9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquired lock "refresh_cache-96f604a9-e42c-4aa8-b5b5-edcb34901d94" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 998.760723] env[69328]: DEBUG nova.network.neutron [None req-dda2c9ea-ab23-4526-ba73-3282180787b9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 998.760911] env[69328]: DEBUG nova.objects.instance [None req-dda2c9ea-ab23-4526-ba73-3282180787b9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lazy-loading 'info_cache' on Instance uuid 96f604a9-e42c-4aa8-b5b5-edcb34901d94 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 998.811450] env[69328]: DEBUG nova.network.neutron [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Successfully updated port: cbad07b8-acca-4410-abd7-78b9b5a05849 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 998.826866] env[69328]: DEBUG nova.compute.manager [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Starting instance... 
{{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 998.951584] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 998.962669] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f7fd4c57-f252-4c6e-a15f-9c4426398d79 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "bc9c3a41-7264-4d69-bc15-397b5fa0a8ad" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 998.962669] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f7fd4c57-f252-4c6e-a15f-9c4426398d79 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "bc9c3a41-7264-4d69-bc15-397b5fa0a8ad" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 998.962669] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f7fd4c57-f252-4c6e-a15f-9c4426398d79 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "bc9c3a41-7264-4d69-bc15-397b5fa0a8ad-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 998.962896] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f7fd4c57-f252-4c6e-a15f-9c4426398d79 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "bc9c3a41-7264-4d69-bc15-397b5fa0a8ad-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 998.963125] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f7fd4c57-f252-4c6e-a15f-9c4426398d79 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "bc9c3a41-7264-4d69-bc15-397b5fa0a8ad-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 998.965289] env[69328]: INFO nova.compute.manager [None req-f7fd4c57-f252-4c6e-a15f-9c4426398d79 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Terminating instance [ 999.024813] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] VM already powered off {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 999.025170] env[69328]: DEBUG nova.compute.manager [None 
req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 999.025992] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f331a49-525c-4bea-86c0-42bcae434634 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.033217] env[69328]: DEBUG oslo_concurrency.lockutils [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquiring lock "refresh_cache-76210566-12d7-4f6a-afa1-6329e87e0f85" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.033217] env[69328]: DEBUG oslo_concurrency.lockutils [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquired lock "refresh_cache-76210566-12d7-4f6a-afa1-6329e87e0f85" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 999.033217] env[69328]: DEBUG nova.network.neutron [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 999.057610] env[69328]: DEBUG oslo_vmware.api [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273774, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071583} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.057884] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 999.058666] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaeaa343-1d02-4482-8f97-7052c1225a08 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.082293] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] 9f6f8e97-cb21-4984-af08-a63ea4578eef/9f6f8e97-cb21-4984-af08-a63ea4578eef.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 999.087398] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c87e99e4-f2f2-46bf-8983-62aded5073d1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.102379] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8d558a5a-0c98-4ff9-8ad9-7513913b79a0 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.028s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 999.107187] env[69328]: DEBUG oslo_concurrency.lockutils [None req-97563d6f-f1dd-4a10-8a2a-3d9aabc95961 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.142s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 999.107438] env[69328]: DEBUG nova.objects.instance [None req-97563d6f-f1dd-4a10-8a2a-3d9aabc95961 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lazy-loading 'resources' on Instance uuid b61436f5-0e8b-4da5-9459-cf9487dfd23f {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 999.113030] env[69328]: DEBUG oslo_vmware.api [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5264cc41-0637-3f71-fed7-afcf19755018, 'name': SearchDatastore_Task, 'duration_secs': 0.010457} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.119524] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 999.119804] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 65fccb3f-5e0e-4140-be0a-5ba20f494d50/65fccb3f-5e0e-4140-be0a-5ba20f494d50.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 999.120561] env[69328]: DEBUG oslo_vmware.api [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 999.120561] env[69328]: value = "task-3273776" [ 999.120561] env[69328]: _type = "Task" [ 999.120561] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.120771] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b5dd689d-6729-413b-8dcc-ef59a65c3f30 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.133791] env[69328]: DEBUG oslo_vmware.api [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273776, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.135193] env[69328]: DEBUG oslo_vmware.api [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Waiting for the task: (returnval){ [ 999.135193] env[69328]: value = "task-3273777" [ 999.135193] env[69328]: _type = "Task" [ 999.135193] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.146279] env[69328]: DEBUG oslo_vmware.api [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Task: {'id': task-3273777, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.313239] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "refresh_cache-a7d4893f-31d4-449d-96d5-a2a1377d8454" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.313436] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquired lock "refresh_cache-a7d4893f-31d4-449d-96d5-a2a1377d8454" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 999.313574] env[69328]: DEBUG nova.network.neutron [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 999.353853] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 999.469565] env[69328]: DEBUG nova.compute.manager [None req-f7fd4c57-f252-4c6e-a15f-9c4426398d79 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 999.469819] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f7fd4c57-f252-4c6e-a15f-9c4426398d79 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 999.471057] env[69328]: DEBUG nova.network.neutron [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Updating instance_info_cache with network_info: [{"id": "c74af0b7-ebfb-4563-9208-a18235899a6c", "address": "fa:16:3e:35:bb:fc", "network": {"id": "cc75e08f-f0f3-4b52-9b40-0de73f044554", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1326858830-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1393040bf5304571ae4b66d0a4ee7b6e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc74af0b7-eb", "ovs_interfaceid": "c74af0b7-ebfb-4563-9208-a18235899a6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.476033] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a7a054b-f104-4b9c-8313-e5c7aac8dfee {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.483862] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7fd4c57-f252-4c6e-a15f-9c4426398d79 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 999.484224] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-80e66af3-4f48-4c0e-9bde-1ddc2cda181c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.492830] env[69328]: DEBUG oslo_vmware.api [None req-f7fd4c57-f252-4c6e-a15f-9c4426398d79 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 999.492830] env[69328]: value = "task-3273778" [ 999.492830] env[69328]: _type = "Task" [ 999.492830] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.504645] env[69328]: DEBUG oslo_vmware.api [None req-f7fd4c57-f252-4c6e-a15f-9c4426398d79 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273778, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.646729] env[69328]: DEBUG oslo_vmware.api [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273776, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.662360] env[69328]: DEBUG oslo_vmware.api [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Task: {'id': task-3273777, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.869105] env[69328]: DEBUG nova.network.neutron [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 999.979410] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Releasing lock "refresh_cache-a0952fdf-5570-4112-bc4d-e9f9cee1599c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 999.981867] env[69328]: DEBUG nova.network.neutron [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Updating instance_info_cache with network_info: [{"id": "a3cab44b-0572-4007-bab9-e84ba084f70a", "address": "fa:16:3e:02:7d:25", "network": {"id": "3be6b159-5d5c-4752-b79d-0ed6110569d0", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-915151552-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.128", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f50ac50ef6ae4abc83a8064746de7029", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3cab44b-05", "ovs_interfaceid": "a3cab44b-0572-4007-bab9-e84ba084f70a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1000.007699] env[69328]: DEBUG oslo_vmware.api [None req-f7fd4c57-f252-4c6e-a15f-9c4426398d79 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273778, 'name': PowerOffVM_Task, 'duration_secs': 0.222053} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.008240] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7fd4c57-f252-4c6e-a15f-9c4426398d79 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1000.008531] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f7fd4c57-f252-4c6e-a15f-9c4426398d79 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1000.009071] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ad9a4a6f-157c-4ca3-b296-bd110ef8c6c4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.027314] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e9a76bc-a467-4c80-a45e-c53659ea38f6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.033976] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec497bb0-9208-45df-b4cc-fc483eccf611 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.067561] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5755cc1d-e1c6-4c0a-a1a4-f90f9c2badb0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.076262] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-602cade4-04ec-4463-bf23-a374cd536828 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.097279] env[69328]: DEBUG nova.compute.provider_tree [None req-97563d6f-f1dd-4a10-8a2a-3d9aabc95961 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1000.100829] env[69328]: DEBUG nova.network.neutron [None req-dda2c9ea-ab23-4526-ba73-3282180787b9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Updating instance_info_cache with network_info: [{"id": "d97e62a9-59f8-4f3b-9296-f5a0803d2b10", "address": "fa:16:3e:81:01:fa", "network": {"id": "e2ab6957-2c9d-4b95-91bd-5a62d9a284ba", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-967470666-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", 
"version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75d5853e3c724d02bacfa75173e38ab3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd97e62a9-59", "ovs_interfaceid": "d97e62a9-59f8-4f3b-9296-f5a0803d2b10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1000.102300] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f7fd4c57-f252-4c6e-a15f-9c4426398d79 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1000.102633] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f7fd4c57-f252-4c6e-a15f-9c4426398d79 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1000.102972] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7fd4c57-f252-4c6e-a15f-9c4426398d79 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Deleting the datastore file [datastore2] bc9c3a41-7264-4d69-bc15-397b5fa0a8ad {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1000.103537] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a290f1dc-f2db-4c6e-98a3-8b28496cdf2a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.113373] env[69328]: DEBUG oslo_vmware.api [None req-f7fd4c57-f252-4c6e-a15f-9c4426398d79 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 1000.113373] env[69328]: value = "task-3273780" [ 1000.113373] env[69328]: _type = "Task" [ 1000.113373] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.121902] env[69328]: DEBUG oslo_vmware.api [None req-f7fd4c57-f252-4c6e-a15f-9c4426398d79 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273780, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.135322] env[69328]: DEBUG oslo_vmware.api [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273776, 'name': ReconfigVM_Task, 'duration_secs': 0.693046} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.135813] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Reconfigured VM instance instance-00000059 to attach disk [datastore1] 9f6f8e97-cb21-4984-af08-a63ea4578eef/9f6f8e97-cb21-4984-af08-a63ea4578eef.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1000.136689] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6f7a2071-79a5-46fc-9af1-cf24daa9cbbb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.149137] env[69328]: DEBUG oslo_vmware.api [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Task: {'id': task-3273777, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.58601} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.150479] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 65fccb3f-5e0e-4140-be0a-5ba20f494d50/65fccb3f-5e0e-4140-be0a-5ba20f494d50.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1000.150707] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1000.151043] env[69328]: DEBUG oslo_vmware.api [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 1000.151043] env[69328]: value = "task-3273781" [ 1000.151043] env[69328]: _type = "Task" [ 1000.151043] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.151305] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9104dbd7-b2c2-4443-b469-937973ed0afc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.165182] env[69328]: DEBUG oslo_vmware.api [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273781, 'name': Rename_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.166729] env[69328]: DEBUG oslo_vmware.api [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Waiting for the task: (returnval){ [ 1000.166729] env[69328]: value = "task-3273782" [ 1000.166729] env[69328]: _type = "Task" [ 1000.166729] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.175245] env[69328]: DEBUG oslo_vmware.api [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Task: {'id': task-3273782, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.363362] env[69328]: DEBUG nova.compute.manager [req-769ee96d-7216-4de5-a85f-4e5258b838d1 req-daa7dd63-c781-4325-b8d4-41c83dc8cf8a service nova] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Received event network-vif-unplugged-c74af0b7-ebfb-4563-9208-a18235899a6c {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1000.363596] env[69328]: DEBUG oslo_concurrency.lockutils [req-769ee96d-7216-4de5-a85f-4e5258b838d1 req-daa7dd63-c781-4325-b8d4-41c83dc8cf8a service nova] Acquiring lock "a0952fdf-5570-4112-bc4d-e9f9cee1599c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1000.363952] env[69328]: DEBUG oslo_concurrency.lockutils [req-769ee96d-7216-4de5-a85f-4e5258b838d1 req-daa7dd63-c781-4325-b8d4-41c83dc8cf8a service nova] Lock "a0952fdf-5570-4112-bc4d-e9f9cee1599c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1000.363952] env[69328]: DEBUG oslo_concurrency.lockutils [req-769ee96d-7216-4de5-a85f-4e5258b838d1 req-daa7dd63-c781-4325-b8d4-41c83dc8cf8a service nova] Lock "a0952fdf-5570-4112-bc4d-e9f9cee1599c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1000.364226] env[69328]: DEBUG nova.compute.manager [req-769ee96d-7216-4de5-a85f-4e5258b838d1 req-daa7dd63-c781-4325-b8d4-41c83dc8cf8a service nova] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] No waiting events found dispatching network-vif-unplugged-c74af0b7-ebfb-4563-9208-a18235899a6c {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1000.364530] env[69328]: WARNING nova.compute.manager [req-769ee96d-7216-4de5-a85f-4e5258b838d1 req-daa7dd63-c781-4325-b8d4-41c83dc8cf8a service nova] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Received unexpected event network-vif-unplugged-c74af0b7-ebfb-4563-9208-a18235899a6c for instance with vm_state shelved and task_state shelving_offloading. 
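The PowerOffVM_Task records in this stretch ("Invoking VirtualMachine.PowerOffVM_Task", "Waiting for the task: ... task-327377x", "progress is N%", "VM already powered off") follow oslo.vmware's invoke_api / wait_for_task pattern, the same api.py:397/434 code paths named in the log. A minimal sketch of that usage, assuming an already-connected oslo_vmware.api.VMwareAPISession and a VM managed-object reference; illustrative only, not the Nova driver code:

```python
from oslo_vmware import exceptions as vexc


def power_off_vm(session, vm_ref):
    try:
        # Starts the vCenter task and returns a Task moref such as the
        # "task-3273775" value seen in the "Waiting for the task" records.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # Polls until SUCCESS or ERROR, producing the periodic
        # "_poll_task ... progress is N%" DEBUG lines, then returns task info.
        return session.wait_for_task(task)
    except vexc.InvalidPowerStateException:
        # Corresponds to the "VM already powered off" DEBUG message above.
        return None
```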
[ 1000.372779] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1000.373814] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08f30010-452b-436f-b8ce-7d9b41730d96 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.386193] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1000.386599] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a1278bf9-3c9d-4008-b69e-d6efd53ff149 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.415334] env[69328]: DEBUG nova.network.neutron [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Updating instance_info_cache with network_info: [{"id": "cbad07b8-acca-4410-abd7-78b9b5a05849", "address": "fa:16:3e:ec:3f:04", "network": {"id": "7f5dcab4-3cec-42a1-b589-88cb373af645", "bridge": "br-int", "label": "tempest-ServersTestJSON-1833802368-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d86de4d5055642aa86a29c6768e3db46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b94712a6-b777-47dd-bc06-f9acfce2d936", "external-id": "nsx-vlan-transportzone-494", "segmentation_id": 494, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbad07b8-ac", "ovs_interfaceid": "cbad07b8-acca-4410-abd7-78b9b5a05849", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1000.461257] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1000.461257] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1000.461257] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Deleting the datastore file [datastore2] a0952fdf-5570-4112-bc4d-e9f9cee1599c {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1000.461257] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f7728026-b069-4211-b199-3ae6f62d5b94 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.468681] env[69328]: DEBUG oslo_vmware.api [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for the task: (returnval){ [ 1000.468681] env[69328]: value = "task-3273784" [ 1000.468681] env[69328]: _type = "Task" [ 1000.468681] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.479088] env[69328]: DEBUG oslo_vmware.api [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273784, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.486103] env[69328]: DEBUG oslo_concurrency.lockutils [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Releasing lock "refresh_cache-76210566-12d7-4f6a-afa1-6329e87e0f85" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1000.605158] env[69328]: DEBUG nova.scheduler.client.report [None req-97563d6f-f1dd-4a10-8a2a-3d9aabc95961 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1000.611985] env[69328]: DEBUG oslo_concurrency.lockutils [None req-dda2c9ea-ab23-4526-ba73-3282180787b9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Releasing lock "refresh_cache-96f604a9-e42c-4aa8-b5b5-edcb34901d94" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1000.612269] env[69328]: DEBUG nova.objects.instance [None req-dda2c9ea-ab23-4526-ba73-3282180787b9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lazy-loading 'migration_context' on Instance uuid 96f604a9-e42c-4aa8-b5b5-edcb34901d94 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1000.624854] env[69328]: DEBUG oslo_vmware.api [None req-f7fd4c57-f252-4c6e-a15f-9c4426398d79 
tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273780, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156397} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.626284] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7fd4c57-f252-4c6e-a15f-9c4426398d79 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1000.626503] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f7fd4c57-f252-4c6e-a15f-9c4426398d79 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1000.626746] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f7fd4c57-f252-4c6e-a15f-9c4426398d79 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1000.627212] env[69328]: INFO nova.compute.manager [None req-f7fd4c57-f252-4c6e-a15f-9c4426398d79 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1000.627319] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f7fd4c57-f252-4c6e-a15f-9c4426398d79 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1000.628052] env[69328]: DEBUG nova.compute.manager [-] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1000.628216] env[69328]: DEBUG nova.network.neutron [-] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1000.664555] env[69328]: DEBUG oslo_vmware.api [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273781, 'name': Rename_Task, 'duration_secs': 0.174476} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.666755] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1000.667076] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b460468e-96b2-4c5e-a6b1-69035688654b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.678265] env[69328]: DEBUG oslo_vmware.api [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Task: {'id': task-3273782, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.142694} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.679657] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1000.680196] env[69328]: DEBUG oslo_vmware.api [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 1000.680196] env[69328]: value = "task-3273785" [ 1000.680196] env[69328]: _type = "Task" [ 1000.680196] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.680927] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-370c4541-8148-4c23-92a0-64be6da3bc82 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.711885] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] 65fccb3f-5e0e-4140-be0a-5ba20f494d50/65fccb3f-5e0e-4140-be0a-5ba20f494d50.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1000.712289] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-da729521-0e38-435a-a66a-7f7e9a79dda4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.736575] env[69328]: DEBUG oslo_vmware.api [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Waiting for the task: (returnval){ [ 1000.736575] env[69328]: value = "task-3273786" [ 1000.736575] env[69328]: _type = "Task" [ 1000.736575] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.748579] env[69328]: DEBUG oslo_vmware.api [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Task: {'id': task-3273786, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.760172] env[69328]: DEBUG nova.compute.manager [req-9ccdef33-c443-4cb2-a601-2c5622124427 req-90d6ba1b-a8c2-4142-84eb-d6f071570888 service nova] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Received event network-changed-cbad07b8-acca-4410-abd7-78b9b5a05849 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1000.760466] env[69328]: DEBUG nova.compute.manager [req-9ccdef33-c443-4cb2-a601-2c5622124427 req-90d6ba1b-a8c2-4142-84eb-d6f071570888 service nova] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Refreshing instance network info cache due to event network-changed-cbad07b8-acca-4410-abd7-78b9b5a05849. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1000.760703] env[69328]: DEBUG oslo_concurrency.lockutils [req-9ccdef33-c443-4cb2-a601-2c5622124427 req-90d6ba1b-a8c2-4142-84eb-d6f071570888 service nova] Acquiring lock "refresh_cache-a7d4893f-31d4-449d-96d5-a2a1377d8454" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.924045] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Releasing lock "refresh_cache-a7d4893f-31d4-449d-96d5-a2a1377d8454" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1000.924045] env[69328]: DEBUG nova.compute.manager [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Instance network_info: |[{"id": "cbad07b8-acca-4410-abd7-78b9b5a05849", "address": "fa:16:3e:ec:3f:04", "network": {"id": "7f5dcab4-3cec-42a1-b589-88cb373af645", "bridge": "br-int", "label": "tempest-ServersTestJSON-1833802368-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d86de4d5055642aa86a29c6768e3db46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b94712a6-b777-47dd-bc06-f9acfce2d936", "external-id": "nsx-vlan-transportzone-494", "segmentation_id": 494, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbad07b8-ac", "ovs_interfaceid": "cbad07b8-acca-4410-abd7-78b9b5a05849", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1000.924045] env[69328]: DEBUG oslo_concurrency.lockutils [req-9ccdef33-c443-4cb2-a601-2c5622124427 req-90d6ba1b-a8c2-4142-84eb-d6f071570888 
service nova] Acquired lock "refresh_cache-a7d4893f-31d4-449d-96d5-a2a1377d8454" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1000.924045] env[69328]: DEBUG nova.network.neutron [req-9ccdef33-c443-4cb2-a601-2c5622124427 req-90d6ba1b-a8c2-4142-84eb-d6f071570888 service nova] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Refreshing network info cache for port cbad07b8-acca-4410-abd7-78b9b5a05849 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1000.924045] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ec:3f:04', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b94712a6-b777-47dd-bc06-f9acfce2d936', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cbad07b8-acca-4410-abd7-78b9b5a05849', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1000.929825] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1000.932174] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1000.932174] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7a8cd4f5-e823-4274-8a28-71094a3f6650 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.960387] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1000.960387] env[69328]: value = "task-3273787" [ 1000.960387] env[69328]: _type = "Task" [ 1000.960387] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.970408] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273787, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.981554] env[69328]: DEBUG oslo_vmware.api [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273784, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.412419} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.982044] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1000.982357] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1000.982607] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1001.022391] env[69328]: INFO nova.scheduler.client.report [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Deleted allocations for instance a0952fdf-5570-4112-bc4d-e9f9cee1599c [ 1001.085929] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1001.087778] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64ed78f3-b299-47a7-9265-2527bcb80488 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.097277] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1001.098201] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-11fc0746-55c1-4376-8812-4b34a3da3b2d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.114646] env[69328]: DEBUG oslo_concurrency.lockutils [None req-97563d6f-f1dd-4a10-8a2a-3d9aabc95961 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.007s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1001.120755] env[69328]: DEBUG nova.objects.base [None req-dda2c9ea-ab23-4526-ba73-3282180787b9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Object Instance<96f604a9-e42c-4aa8-b5b5-edcb34901d94> lazy-loaded attributes: info_cache,migration_context {{(pid=69328) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1001.122593] env[69328]: DEBUG 
oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 4.960s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1001.124838] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ef58d89-65ba-410b-9c54-376e40ace650 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.155992] env[69328]: INFO nova.scheduler.client.report [None req-97563d6f-f1dd-4a10-8a2a-3d9aabc95961 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Deleted allocations for instance b61436f5-0e8b-4da5-9459-cf9487dfd23f [ 1001.157115] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e83835b-a08b-4ec3-8be5-2ea9d2313b53 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.165284] env[69328]: INFO nova.compute.manager [None req-9a1267d1-6bf8-4413-9f5a-2182005617f7 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Get console output [ 1001.165451] env[69328]: WARNING nova.virt.vmwareapi.driver [None req-9a1267d1-6bf8-4413-9f5a-2182005617f7 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] The console log is missing. Check your VSPC configuration [ 1001.168147] env[69328]: DEBUG oslo_vmware.api [None req-dda2c9ea-ab23-4526-ba73-3282180787b9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 1001.168147] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52db5de3-c193-c6c1-3457-1fcaac426e17" [ 1001.168147] env[69328]: _type = "Task" [ 1001.168147] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.181043] env[69328]: DEBUG oslo_vmware.api [None req-dda2c9ea-ab23-4526-ba73-3282180787b9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52db5de3-c193-c6c1-3457-1fcaac426e17, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.191126] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1001.191126] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1001.191126] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Deleting the datastore file [datastore1] 76210566-12d7-4f6a-afa1-6329e87e0f85 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1001.191921] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e5f6c53c-afe7-4542-874d-178728e2b1a8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.197908] env[69328]: DEBUG oslo_vmware.api [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273785, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.201662] env[69328]: DEBUG oslo_vmware.api [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 1001.201662] env[69328]: value = "task-3273789" [ 1001.201662] env[69328]: _type = "Task" [ 1001.201662] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.215608] env[69328]: DEBUG oslo_vmware.api [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273789, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.249741] env[69328]: DEBUG oslo_vmware.api [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Task: {'id': task-3273786, 'name': ReconfigVM_Task, 'duration_secs': 0.390937} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.250388] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Reconfigured VM instance instance-0000005a to attach disk [datastore1] 65fccb3f-5e0e-4140-be0a-5ba20f494d50/65fccb3f-5e0e-4140-be0a-5ba20f494d50.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1001.251376] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-390e336f-8325-4439-b23d-99a81904a51e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.260585] env[69328]: DEBUG oslo_vmware.api [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Waiting for the task: (returnval){ [ 1001.260585] env[69328]: value = "task-3273790" [ 1001.260585] env[69328]: _type = "Task" [ 1001.260585] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.271109] env[69328]: DEBUG oslo_vmware.api [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Task: {'id': task-3273790, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.478382] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273787, 'name': CreateVM_Task, 'duration_secs': 0.423596} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.478663] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1001.479683] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1001.479861] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1001.480651] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1001.483522] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1fe122f6-e557-4bc4-9a7f-a7af4e172aef {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.495804] env[69328]: DEBUG oslo_vmware.api [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1001.495804] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52bea2f8-5e60-3d60-7386-6ae614237440" [ 1001.495804] env[69328]: _type = "Task" [ 1001.495804] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.509290] env[69328]: DEBUG oslo_vmware.api [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52bea2f8-5e60-3d60-7386-6ae614237440, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.533485] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1001.674085] env[69328]: DEBUG oslo_concurrency.lockutils [None req-97563d6f-f1dd-4a10-8a2a-3d9aabc95961 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "b61436f5-0e8b-4da5-9459-cf9487dfd23f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.655s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1001.683844] env[69328]: DEBUG oslo_vmware.api [None req-dda2c9ea-ab23-4526-ba73-3282180787b9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52db5de3-c193-c6c1-3457-1fcaac426e17, 'name': SearchDatastore_Task, 'duration_secs': 0.032028} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.683844] env[69328]: DEBUG oslo_concurrency.lockutils [None req-dda2c9ea-ab23-4526-ba73-3282180787b9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1001.698755] env[69328]: DEBUG oslo_vmware.api [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273785, 'name': PowerOnVM_Task, 'duration_secs': 0.792092} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.698755] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1001.698960] env[69328]: INFO nova.compute.manager [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Took 14.95 seconds to spawn the instance on the hypervisor. 
[ 1001.699206] env[69328]: DEBUG nova.compute.manager [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1001.700489] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-761819b5-677e-4df1-97c5-204afd0d9295 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.724038] env[69328]: DEBUG oslo_vmware.api [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273789, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.206056} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.724582] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1001.724780] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1001.724954] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1001.737608] env[69328]: DEBUG nova.network.neutron [req-9ccdef33-c443-4cb2-a601-2c5622124427 req-90d6ba1b-a8c2-4142-84eb-d6f071570888 service nova] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Updated VIF entry in instance network info cache for port cbad07b8-acca-4410-abd7-78b9b5a05849. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1001.737992] env[69328]: DEBUG nova.network.neutron [req-9ccdef33-c443-4cb2-a601-2c5622124427 req-90d6ba1b-a8c2-4142-84eb-d6f071570888 service nova] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Updating instance_info_cache with network_info: [{"id": "cbad07b8-acca-4410-abd7-78b9b5a05849", "address": "fa:16:3e:ec:3f:04", "network": {"id": "7f5dcab4-3cec-42a1-b589-88cb373af645", "bridge": "br-int", "label": "tempest-ServersTestJSON-1833802368-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d86de4d5055642aa86a29c6768e3db46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b94712a6-b777-47dd-bc06-f9acfce2d936", "external-id": "nsx-vlan-transportzone-494", "segmentation_id": 494, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbad07b8-ac", "ovs_interfaceid": "cbad07b8-acca-4410-abd7-78b9b5a05849", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1001.756087] env[69328]: INFO nova.scheduler.client.report [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Deleted allocations for instance 76210566-12d7-4f6a-afa1-6329e87e0f85 [ 1001.774203] env[69328]: DEBUG oslo_vmware.api [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Task: {'id': task-3273790, 'name': Rename_Task, 'duration_secs': 0.198664} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.774535] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1001.774783] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0ba7f7a0-8deb-4027-8e86-938fab768805 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.782366] env[69328]: DEBUG oslo_vmware.api [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Waiting for the task: (returnval){ [ 1001.782366] env[69328]: value = "task-3273791" [ 1001.782366] env[69328]: _type = "Task" [ 1001.782366] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.792131] env[69328]: DEBUG oslo_vmware.api [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Task: {'id': task-3273791, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.006587] env[69328]: DEBUG oslo_vmware.api [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52bea2f8-5e60-3d60-7386-6ae614237440, 'name': SearchDatastore_Task, 'duration_secs': 0.02223} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.006900] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1002.007144] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1002.007396] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.007544] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1002.007723] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1002.008353] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-68f25561-fa5a-4d42-ad03-439b36e7a3f5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.024982] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1002.025212] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None 
req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1002.025952] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d627360-2be0-4842-bbf8-599e24419595 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.032996] env[69328]: DEBUG oslo_vmware.api [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1002.032996] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c4404d-a843-77b2-1e5a-ad75f5e19676" [ 1002.032996] env[69328]: _type = "Task" [ 1002.032996] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.043608] env[69328]: DEBUG oslo_vmware.api [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c4404d-a843-77b2-1e5a-ad75f5e19676, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.065411] env[69328]: DEBUG nova.network.neutron [-] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1002.140250] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Applying migration context for instance 96f604a9-e42c-4aa8-b5b5-edcb34901d94 as it has an incoming, in-progress migration d9a4d9ef-a86e-49ef-841e-1e4093b2e6d7. Migration status is confirming {{(pid=69328) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1002.142799] env[69328]: INFO nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Updating resource usage from migration d9a4d9ef-a86e-49ef-841e-1e4093b2e6d7 [ 1002.164289] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance bc9c3a41-7264-4d69-bc15-397b5fa0a8ad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1002.164441] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance b0a1441c-81e2-4131-a2ff-f5042d559d9f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1002.164561] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1002.164673] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 55d9ba65-e5c8-446a-a209-a840f30ff02c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1002.164785] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance a0b663eb-31b0-4de1-94bc-660a7d9c1c7b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1002.164894] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance f1be93b2-08db-41fe-87c4-f4e5f964cfa4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1002.165016] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 3ba646e8-a5c8-4917-a1c4-32b37affb598 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1002.165139] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance c751ef77-c3be-46cd-b7eb-fe139bf0998b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1002.165248] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 1413dcfe-3570-4657-b811-81a1acc159d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1002.165354] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance b21ff3c9-d53a-4065-a271-682c2f1b895d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1002.165460] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 65e38a02-880b-46e2-8866-645a9fc17c7a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1002.165564] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance a95d01cf-c26b-466c-a5b6-a7e43f0321fa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1002.165668] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 52c87371-4142-40d6-ac68-804aabd9f823 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1002.165803] env[69328]: WARNING nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 51a9c492-6f91-4186-b550-ef12284b8a84 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1002.165911] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Migration d9a4d9ef-a86e-49ef-841e-1e4093b2e6d7 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1002.166193] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 96f604a9-e42c-4aa8-b5b5-edcb34901d94 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1002.166364] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 9f6f8e97-cb21-4984-af08-a63ea4578eef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1002.166480] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 65fccb3f-5e0e-4140-be0a-5ba20f494d50 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1002.166586] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance a7d4893f-31d4-449d-96d5-a2a1377d8454 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1002.236028] env[69328]: INFO nova.compute.manager [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Took 29.36 seconds to build instance. [ 1002.241136] env[69328]: DEBUG oslo_concurrency.lockutils [req-9ccdef33-c443-4cb2-a601-2c5622124427 req-90d6ba1b-a8c2-4142-84eb-d6f071570888 service nova] Releasing lock "refresh_cache-a7d4893f-31d4-449d-96d5-a2a1377d8454" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1002.261142] env[69328]: DEBUG oslo_concurrency.lockutils [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1002.294220] env[69328]: DEBUG oslo_vmware.api [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Task: {'id': task-3273791, 'name': PowerOnVM_Task} progress is 90%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.379649] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7b0be2c6-034d-44a2-9992-d33f2e03ccd0 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Acquiring lock "3b4b6687-fb6d-4bb7-8604-20a3ba706ff3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1002.380085] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7b0be2c6-034d-44a2-9992-d33f2e03ccd0 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Lock "3b4b6687-fb6d-4bb7-8604-20a3ba706ff3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1002.380170] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7b0be2c6-034d-44a2-9992-d33f2e03ccd0 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Acquiring lock "3b4b6687-fb6d-4bb7-8604-20a3ba706ff3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1002.380514] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7b0be2c6-034d-44a2-9992-d33f2e03ccd0 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Lock "3b4b6687-fb6d-4bb7-8604-20a3ba706ff3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1002.380514] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7b0be2c6-034d-44a2-9992-d33f2e03ccd0 
tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Lock "3b4b6687-fb6d-4bb7-8604-20a3ba706ff3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1002.383569] env[69328]: INFO nova.compute.manager [None req-7b0be2c6-034d-44a2-9992-d33f2e03ccd0 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Terminating instance [ 1002.483876] env[69328]: DEBUG nova.compute.manager [req-e5687cf8-87e6-4433-bf45-eed1955a5245 req-295f8e59-d216-405f-a8ce-7b6310ca4406 service nova] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Received event network-changed-c74af0b7-ebfb-4563-9208-a18235899a6c {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1002.484359] env[69328]: DEBUG nova.compute.manager [req-e5687cf8-87e6-4433-bf45-eed1955a5245 req-295f8e59-d216-405f-a8ce-7b6310ca4406 service nova] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Refreshing instance network info cache due to event network-changed-c74af0b7-ebfb-4563-9208-a18235899a6c. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1002.484705] env[69328]: DEBUG oslo_concurrency.lockutils [req-e5687cf8-87e6-4433-bf45-eed1955a5245 req-295f8e59-d216-405f-a8ce-7b6310ca4406 service nova] Acquiring lock "refresh_cache-a0952fdf-5570-4112-bc4d-e9f9cee1599c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.484872] env[69328]: DEBUG oslo_concurrency.lockutils [req-e5687cf8-87e6-4433-bf45-eed1955a5245 req-295f8e59-d216-405f-a8ce-7b6310ca4406 service nova] Acquired lock "refresh_cache-a0952fdf-5570-4112-bc4d-e9f9cee1599c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1002.485192] env[69328]: DEBUG nova.network.neutron [req-e5687cf8-87e6-4433-bf45-eed1955a5245 req-295f8e59-d216-405f-a8ce-7b6310ca4406 service nova] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Refreshing network info cache for port c74af0b7-ebfb-4563-9208-a18235899a6c {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1002.547592] env[69328]: DEBUG oslo_vmware.api [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c4404d-a843-77b2-1e5a-ad75f5e19676, 'name': SearchDatastore_Task, 'duration_secs': 0.017211} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.548509] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a34f7b19-2b63-4f2e-a6d6-f6218310c5f9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.554491] env[69328]: DEBUG oslo_vmware.api [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1002.554491] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5278e666-82cf-d7a0-66a8-2f7b1e03453b" [ 1002.554491] env[69328]: _type = "Task" [ 1002.554491] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.563157] env[69328]: DEBUG oslo_vmware.api [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5278e666-82cf-d7a0-66a8-2f7b1e03453b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.567895] env[69328]: INFO nova.compute.manager [-] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Took 1.94 seconds to deallocate network for instance. [ 1002.673560] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance dc050589-e37a-4798-9532-df4ecfab7eb1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1002.737268] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e2102fed-c967-4b78-bb06-321a1e88abcf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "9f6f8e97-cb21-4984-af08-a63ea4578eef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.868s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1002.793858] env[69328]: DEBUG oslo_vmware.api [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Task: {'id': task-3273791, 'name': PowerOnVM_Task, 'duration_secs': 0.62837} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.796145] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1002.796145] env[69328]: INFO nova.compute.manager [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Took 13.43 seconds to spawn the instance on the hypervisor. 
[ 1002.796145] env[69328]: DEBUG nova.compute.manager [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1002.796830] env[69328]: DEBUG nova.compute.manager [req-642e114a-7ef5-4d46-b20a-ba1a5ab4e2d5 req-b69cd52d-a4b3-43f1-8fd8-cfb7f2e7ea80 service nova] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Received event network-vif-unplugged-a3cab44b-0572-4007-bab9-e84ba084f70a {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1002.797038] env[69328]: DEBUG oslo_concurrency.lockutils [req-642e114a-7ef5-4d46-b20a-ba1a5ab4e2d5 req-b69cd52d-a4b3-43f1-8fd8-cfb7f2e7ea80 service nova] Acquiring lock "76210566-12d7-4f6a-afa1-6329e87e0f85-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1002.797247] env[69328]: DEBUG oslo_concurrency.lockutils [req-642e114a-7ef5-4d46-b20a-ba1a5ab4e2d5 req-b69cd52d-a4b3-43f1-8fd8-cfb7f2e7ea80 service nova] Lock "76210566-12d7-4f6a-afa1-6329e87e0f85-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1002.797411] env[69328]: DEBUG oslo_concurrency.lockutils [req-642e114a-7ef5-4d46-b20a-ba1a5ab4e2d5 req-b69cd52d-a4b3-43f1-8fd8-cfb7f2e7ea80 service nova] Lock "76210566-12d7-4f6a-afa1-6329e87e0f85-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1002.797576] env[69328]: DEBUG nova.compute.manager [req-642e114a-7ef5-4d46-b20a-ba1a5ab4e2d5 req-b69cd52d-a4b3-43f1-8fd8-cfb7f2e7ea80 service nova] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] No waiting events found dispatching network-vif-unplugged-a3cab44b-0572-4007-bab9-e84ba084f70a {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1002.797739] env[69328]: WARNING nova.compute.manager [req-642e114a-7ef5-4d46-b20a-ba1a5ab4e2d5 req-b69cd52d-a4b3-43f1-8fd8-cfb7f2e7ea80 service nova] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Received unexpected event network-vif-unplugged-a3cab44b-0572-4007-bab9-e84ba084f70a for instance with vm_state shelved_offloaded and task_state None. [ 1002.797896] env[69328]: DEBUG nova.compute.manager [req-642e114a-7ef5-4d46-b20a-ba1a5ab4e2d5 req-b69cd52d-a4b3-43f1-8fd8-cfb7f2e7ea80 service nova] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Received event network-changed-a3cab44b-0572-4007-bab9-e84ba084f70a {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1002.798059] env[69328]: DEBUG nova.compute.manager [req-642e114a-7ef5-4d46-b20a-ba1a5ab4e2d5 req-b69cd52d-a4b3-43f1-8fd8-cfb7f2e7ea80 service nova] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Refreshing instance network info cache due to event network-changed-a3cab44b-0572-4007-bab9-e84ba084f70a. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1002.798263] env[69328]: DEBUG oslo_concurrency.lockutils [req-642e114a-7ef5-4d46-b20a-ba1a5ab4e2d5 req-b69cd52d-a4b3-43f1-8fd8-cfb7f2e7ea80 service nova] Acquiring lock "refresh_cache-76210566-12d7-4f6a-afa1-6329e87e0f85" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.798383] env[69328]: DEBUG oslo_concurrency.lockutils [req-642e114a-7ef5-4d46-b20a-ba1a5ab4e2d5 req-b69cd52d-a4b3-43f1-8fd8-cfb7f2e7ea80 service nova] Acquired lock "refresh_cache-76210566-12d7-4f6a-afa1-6329e87e0f85" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1002.798539] env[69328]: DEBUG nova.network.neutron [req-642e114a-7ef5-4d46-b20a-ba1a5ab4e2d5 req-b69cd52d-a4b3-43f1-8fd8-cfb7f2e7ea80 service nova] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Refreshing network info cache for port a3cab44b-0572-4007-bab9-e84ba084f70a {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1002.801164] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d3c0284-d0f9-4c97-98e7-f2ce7a10e3c2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.888444] env[69328]: DEBUG nova.compute.manager [None req-7b0be2c6-034d-44a2-9992-d33f2e03ccd0 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1002.888672] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7b0be2c6-034d-44a2-9992-d33f2e03ccd0 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1002.889987] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5da9ad35-9b95-4bef-9381-bbb0d9999f32 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.898121] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b0be2c6-034d-44a2-9992-d33f2e03ccd0 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1002.899123] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0c81c0f0-0100-4c69-80b3-0f926395c0b5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.906139] env[69328]: DEBUG oslo_vmware.api [None req-7b0be2c6-034d-44a2-9992-d33f2e03ccd0 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Waiting for the task: (returnval){ [ 1002.906139] env[69328]: value = "task-3273792" [ 1002.906139] env[69328]: _type = "Task" [ 1002.906139] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.916063] env[69328]: DEBUG oslo_vmware.api [None req-7b0be2c6-034d-44a2-9992-d33f2e03ccd0 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': task-3273792, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.991937] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "204286d7-c806-48cb-85e9-b2a78571777c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1002.991937] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "204286d7-c806-48cb-85e9-b2a78571777c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1003.067722] env[69328]: DEBUG oslo_vmware.api [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5278e666-82cf-d7a0-66a8-2f7b1e03453b, 'name': SearchDatastore_Task, 'duration_secs': 0.032803} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.068020] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1003.068314] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] a7d4893f-31d4-449d-96d5-a2a1377d8454/a7d4893f-31d4-449d-96d5-a2a1377d8454.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1003.068631] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2f49ddb2-35e4-4fa0-ade3-b6da13d36fc4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.074890] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f7fd4c57-f252-4c6e-a15f-9c4426398d79 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1003.081018] env[69328]: DEBUG oslo_vmware.api [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1003.081018] env[69328]: value = "task-3273793" [ 1003.081018] env[69328]: _type = "Task" [ 1003.081018] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.092977] env[69328]: DEBUG oslo_vmware.api [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273793, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.177663] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance dd43adb3-b073-483a-81dd-69df7f746874 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1003.220265] env[69328]: DEBUG nova.network.neutron [req-e5687cf8-87e6-4433-bf45-eed1955a5245 req-295f8e59-d216-405f-a8ce-7b6310ca4406 service nova] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Updated VIF entry in instance network info cache for port c74af0b7-ebfb-4563-9208-a18235899a6c. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1003.220825] env[69328]: DEBUG nova.network.neutron [req-e5687cf8-87e6-4433-bf45-eed1955a5245 req-295f8e59-d216-405f-a8ce-7b6310ca4406 service nova] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Updating instance_info_cache with network_info: [{"id": "c74af0b7-ebfb-4563-9208-a18235899a6c", "address": "fa:16:3e:35:bb:fc", "network": {"id": "cc75e08f-f0f3-4b52-9b40-0de73f044554", "bridge": null, "label": "tempest-ServersNegativeTestJSON-1326858830-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1393040bf5304571ae4b66d0a4ee7b6e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapc74af0b7-eb", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.328064] env[69328]: INFO nova.compute.manager [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Took 26.83 seconds to build instance. [ 1003.419076] env[69328]: DEBUG oslo_vmware.api [None req-7b0be2c6-034d-44a2-9992-d33f2e03ccd0 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': task-3273792, 'name': PowerOffVM_Task, 'duration_secs': 0.375013} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.419390] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b0be2c6-034d-44a2-9992-d33f2e03ccd0 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1003.419570] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7b0be2c6-034d-44a2-9992-d33f2e03ccd0 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1003.422418] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-abe583b8-1ee4-4787-b76e-21dc3b3e1b20 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.494069] env[69328]: DEBUG nova.compute.manager [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Starting instance... 
{{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1003.509087] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7b0be2c6-034d-44a2-9992-d33f2e03ccd0 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1003.509331] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7b0be2c6-034d-44a2-9992-d33f2e03ccd0 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1003.509516] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b0be2c6-034d-44a2-9992-d33f2e03ccd0 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Deleting the datastore file [datastore1] 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1003.509828] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-01ad041b-f11c-4889-b8c2-4db46105bfcd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.521030] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Acquiring lock "a0952fdf-5570-4112-bc4d-e9f9cee1599c" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1003.522627] env[69328]: DEBUG oslo_vmware.api [None req-7b0be2c6-034d-44a2-9992-d33f2e03ccd0 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Waiting for the task: (returnval){ [ 1003.522627] env[69328]: value = "task-3273795" [ 1003.522627] env[69328]: _type = "Task" [ 1003.522627] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.540557] env[69328]: DEBUG oslo_vmware.api [None req-7b0be2c6-034d-44a2-9992-d33f2e03ccd0 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': task-3273795, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.598012] env[69328]: DEBUG oslo_vmware.api [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273793, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.681669] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 6b9757de-a274-4f4d-9b73-cc2ca92b4732 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1003.724437] env[69328]: DEBUG oslo_concurrency.lockutils [req-e5687cf8-87e6-4433-bf45-eed1955a5245 req-295f8e59-d216-405f-a8ce-7b6310ca4406 service nova] Releasing lock "refresh_cache-a0952fdf-5570-4112-bc4d-e9f9cee1599c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1003.724437] env[69328]: DEBUG nova.compute.manager [req-e5687cf8-87e6-4433-bf45-eed1955a5245 req-295f8e59-d216-405f-a8ce-7b6310ca4406 service nova] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Received event network-vif-deleted-e8f19fa7-2ac8-47ea-bd97-b8bcdc8f3ed6 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1003.813904] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquiring lock "c7321021-15ea-47f4-a8ca-1045f2966394" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1003.813904] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "c7321021-15ea-47f4-a8ca-1045f2966394" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1003.820059] env[69328]: DEBUG nova.network.neutron [req-642e114a-7ef5-4d46-b20a-ba1a5ab4e2d5 req-b69cd52d-a4b3-43f1-8fd8-cfb7f2e7ea80 service nova] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Updated VIF entry in instance network info cache for port a3cab44b-0572-4007-bab9-e84ba084f70a. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1003.820059] env[69328]: DEBUG nova.network.neutron [req-642e114a-7ef5-4d46-b20a-ba1a5ab4e2d5 req-b69cd52d-a4b3-43f1-8fd8-cfb7f2e7ea80 service nova] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Updating instance_info_cache with network_info: [{"id": "a3cab44b-0572-4007-bab9-e84ba084f70a", "address": "fa:16:3e:02:7d:25", "network": {"id": "3be6b159-5d5c-4752-b79d-0ed6110569d0", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-915151552-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.128", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f50ac50ef6ae4abc83a8064746de7029", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapa3cab44b-05", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.830526] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7f317e02-604a-4d2d-8531-665d0b0aa231 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Lock "65fccb3f-5e0e-4140-be0a-5ba20f494d50" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.980s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1004.024683] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1004.034801] env[69328]: DEBUG oslo_vmware.api [None req-7b0be2c6-034d-44a2-9992-d33f2e03ccd0 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': task-3273795, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.095983] env[69328]: DEBUG oslo_vmware.api [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273793, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.558735} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.096278] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] a7d4893f-31d4-449d-96d5-a2a1377d8454/a7d4893f-31d4-449d-96d5-a2a1377d8454.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1004.096485] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1004.096747] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-af3269e6-ed15-4235-90f3-f52a13c84770 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.105441] env[69328]: DEBUG oslo_vmware.api [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1004.105441] env[69328]: value = "task-3273796" [ 1004.105441] env[69328]: _type = "Task" [ 1004.105441] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.120869] env[69328]: DEBUG oslo_vmware.api [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273796, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.184279] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 19f537b7-90fc-4832-b137-e042e00a508b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1004.185489] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Total usable vcpus: 48, total allocated vcpus: 19 {{(pid=69328) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1004.185489] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4224MB phys_disk=200GB used_disk=20GB total_vcpus=48 used_vcpus=19 pci_stats=[] {{(pid=69328) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1004.318371] env[69328]: DEBUG nova.compute.manager [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Starting instance... 
{{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1004.322000] env[69328]: DEBUG oslo_concurrency.lockutils [req-642e114a-7ef5-4d46-b20a-ba1a5ab4e2d5 req-b69cd52d-a4b3-43f1-8fd8-cfb7f2e7ea80 service nova] Releasing lock "refresh_cache-76210566-12d7-4f6a-afa1-6329e87e0f85" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1004.537409] env[69328]: DEBUG oslo_vmware.api [None req-7b0be2c6-034d-44a2-9992-d33f2e03ccd0 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Task: {'id': task-3273795, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.529871} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.537679] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b0be2c6-034d-44a2-9992-d33f2e03ccd0 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1004.537866] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7b0be2c6-034d-44a2-9992-d33f2e03ccd0 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1004.538071] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7b0be2c6-034d-44a2-9992-d33f2e03ccd0 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1004.539109] env[69328]: INFO nova.compute.manager [None req-7b0be2c6-034d-44a2-9992-d33f2e03ccd0 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Took 1.65 seconds to destroy the instance on the hypervisor. [ 1004.539109] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7b0be2c6-034d-44a2-9992-d33f2e03ccd0 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1004.539109] env[69328]: DEBUG nova.compute.manager [-] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1004.539109] env[69328]: DEBUG nova.network.neutron [-] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1004.553352] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8e1d7ca-f941-4bf6-beb5-828a67cd71f9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.561153] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77555e69-9932-4ce6-8378-50f6a0827844 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.596161] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e3ff490-cb00-42c2-adc2-c429a91ba19d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.605795] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8d3cf3d-622c-4eeb-a76a-6eab55f09597 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.615211] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquiring lock "76210566-12d7-4f6a-afa1-6329e87e0f85" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1004.626459] env[69328]: DEBUG oslo_vmware.api [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273796, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069548} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.626941] env[69328]: DEBUG nova.compute.provider_tree [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1004.628951] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1004.629948] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ca8ef18-1502-4d7e-b193-ff5b04c4faad {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.655475] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] a7d4893f-31d4-449d-96d5-a2a1377d8454/a7d4893f-31d4-449d-96d5-a2a1377d8454.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1004.655475] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-22f4bd91-162c-4b10-a753-5aa83828ef44 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.678617] env[69328]: DEBUG oslo_vmware.api [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1004.678617] env[69328]: value = "task-3273797" [ 1004.678617] env[69328]: _type = "Task" [ 1004.678617] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.689862] env[69328]: DEBUG oslo_vmware.api [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273797, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.844660] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1004.899451] env[69328]: DEBUG nova.compute.manager [req-b81354f6-0613-4505-a3ed-292ba29681f9 req-c2e485d8-4bb9-4718-a936-f1de0477fbc5 service nova] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Received event network-changed-3b413041-b9e3-47e2-a4f8-f828e31f079a {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1004.899772] env[69328]: DEBUG nova.compute.manager [req-b81354f6-0613-4505-a3ed-292ba29681f9 req-c2e485d8-4bb9-4718-a936-f1de0477fbc5 service nova] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Refreshing instance network info cache due to event network-changed-3b413041-b9e3-47e2-a4f8-f828e31f079a. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1004.899869] env[69328]: DEBUG oslo_concurrency.lockutils [req-b81354f6-0613-4505-a3ed-292ba29681f9 req-c2e485d8-4bb9-4718-a936-f1de0477fbc5 service nova] Acquiring lock "refresh_cache-65fccb3f-5e0e-4140-be0a-5ba20f494d50" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.899984] env[69328]: DEBUG oslo_concurrency.lockutils [req-b81354f6-0613-4505-a3ed-292ba29681f9 req-c2e485d8-4bb9-4718-a936-f1de0477fbc5 service nova] Acquired lock "refresh_cache-65fccb3f-5e0e-4140-be0a-5ba20f494d50" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1004.900171] env[69328]: DEBUG nova.network.neutron [req-b81354f6-0613-4505-a3ed-292ba29681f9 req-c2e485d8-4bb9-4718-a936-f1de0477fbc5 service nova] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Refreshing network info cache for port 3b413041-b9e3-47e2-a4f8-f828e31f079a {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1005.131026] env[69328]: DEBUG nova.scheduler.client.report [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1005.190562] env[69328]: DEBUG oslo_vmware.api [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273797, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.558477] env[69328]: DEBUG nova.network.neutron [-] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1005.635893] env[69328]: DEBUG nova.network.neutron [req-b81354f6-0613-4505-a3ed-292ba29681f9 req-c2e485d8-4bb9-4718-a936-f1de0477fbc5 service nova] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Updated VIF entry in instance network info cache for port 3b413041-b9e3-47e2-a4f8-f828e31f079a. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1005.636296] env[69328]: DEBUG nova.network.neutron [req-b81354f6-0613-4505-a3ed-292ba29681f9 req-c2e485d8-4bb9-4718-a936-f1de0477fbc5 service nova] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Updating instance_info_cache with network_info: [{"id": "3b413041-b9e3-47e2-a4f8-f828e31f079a", "address": "fa:16:3e:ce:7b:87", "network": {"id": "4e070871-25f8-4279-bd7d-24dcf5a39f03", "bridge": "br-int", "label": "tempest-ServersTestJSON-1512383675-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.189", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38528276c7744d798af4057d29c88ddb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b413041-b9", "ovs_interfaceid": "3b413041-b9e3-47e2-a4f8-f828e31f079a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1005.637941] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69328) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1005.638181] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.517s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1005.638706] env[69328]: DEBUG oslo_concurrency.lockutils [None req-39496824-d7d3-42e2-a8f1-da97e3053f4e tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.614s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1005.638902] env[69328]: DEBUG oslo_concurrency.lockutils [None req-39496824-d7d3-42e2-a8f1-da97e3053f4e tempest-ImagesTestJSON-1434614674 
tempest-ImagesTestJSON-1434614674-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1005.641220] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.769s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1005.642865] env[69328]: INFO nova.compute.claims [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1005.671762] env[69328]: INFO nova.scheduler.client.report [None req-39496824-d7d3-42e2-a8f1-da97e3053f4e tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Deleted allocations for instance 51a9c492-6f91-4186-b550-ef12284b8a84 [ 1005.691655] env[69328]: DEBUG oslo_vmware.api [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273797, 'name': ReconfigVM_Task, 'duration_secs': 0.559732} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.692288] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Reconfigured VM instance instance-0000005b to attach disk [datastore1] a7d4893f-31d4-449d-96d5-a2a1377d8454/a7d4893f-31d4-449d-96d5-a2a1377d8454.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1005.692641] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a6f9098f-e137-41e8-a65a-7d8b3fd2faab {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.703921] env[69328]: DEBUG oslo_vmware.api [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1005.703921] env[69328]: value = "task-3273798" [ 1005.703921] env[69328]: _type = "Task" [ 1005.703921] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.713467] env[69328]: DEBUG oslo_vmware.api [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273798, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.063401] env[69328]: INFO nova.compute.manager [-] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Took 1.52 seconds to deallocate network for instance. 
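The "Acquiring lock ... / Lock ... acquired ... waited N.NNNs / Lock ... released ... held N.NNNs" entries above all come from oslo.concurrency's lockutils wrappers around the resource tracker's critical sections. A minimal sketch of that pattern, with a placeholder function standing in for Nova's real instance_claim/update_usage code (not the actual resource tracker implementation):

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage_sketch(instance_uuid):
    # Runs with the "compute_resources" lock held; the "waited"/"held"
    # durations in the log are measured around this critical section.
    return instance_uuid

# The same lock can also be taken explicitly as a context manager:
with lockutils.lock('compute_resources'):
    pass  # claim or release resources here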
[ 1006.139828] env[69328]: DEBUG oslo_concurrency.lockutils [req-b81354f6-0613-4505-a3ed-292ba29681f9 req-c2e485d8-4bb9-4718-a936-f1de0477fbc5 service nova] Releasing lock "refresh_cache-65fccb3f-5e0e-4140-be0a-5ba20f494d50" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1006.178377] env[69328]: DEBUG oslo_concurrency.lockutils [None req-39496824-d7d3-42e2-a8f1-da97e3053f4e tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "51a9c492-6f91-4186-b550-ef12284b8a84" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.611s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1006.215178] env[69328]: DEBUG oslo_vmware.api [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273798, 'name': Rename_Task, 'duration_secs': 0.383525} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.215178] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1006.215399] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-89129b51-7f99-42dc-949d-f9ecd0ec816f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.223388] env[69328]: DEBUG oslo_vmware.api [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1006.223388] env[69328]: value = "task-3273799" [ 1006.223388] env[69328]: _type = "Task" [ 1006.223388] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.231977] env[69328]: DEBUG oslo_vmware.api [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273799, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.571202] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7b0be2c6-034d-44a2-9992-d33f2e03ccd0 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1006.678821] env[69328]: DEBUG oslo_concurrency.lockutils [None req-be70f0a8-60ab-4579-aab1-4d42d40bbc27 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "b21ff3c9-d53a-4065-a271-682c2f1b895d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1006.679126] env[69328]: DEBUG oslo_concurrency.lockutils [None req-be70f0a8-60ab-4579-aab1-4d42d40bbc27 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "b21ff3c9-d53a-4065-a271-682c2f1b895d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1006.679337] env[69328]: DEBUG oslo_concurrency.lockutils [None req-be70f0a8-60ab-4579-aab1-4d42d40bbc27 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "b21ff3c9-d53a-4065-a271-682c2f1b895d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1006.679531] env[69328]: DEBUG oslo_concurrency.lockutils [None req-be70f0a8-60ab-4579-aab1-4d42d40bbc27 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "b21ff3c9-d53a-4065-a271-682c2f1b895d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1006.679734] env[69328]: DEBUG oslo_concurrency.lockutils [None req-be70f0a8-60ab-4579-aab1-4d42d40bbc27 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "b21ff3c9-d53a-4065-a271-682c2f1b895d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1006.684232] env[69328]: INFO nova.compute.manager [None req-be70f0a8-60ab-4579-aab1-4d42d40bbc27 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Terminating instance [ 1006.736978] env[69328]: DEBUG oslo_vmware.api [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273799, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.927603] env[69328]: DEBUG nova.compute.manager [req-166ee304-8ef4-48d6-9660-d7cb1e1b949f req-a4d66501-262b-4d98-bbf8-98274f82b370 service nova] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Received event network-vif-deleted-4ffb2723-2cb7-4f04-8e1b-208a6329288e {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1006.977086] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f0a9371-08d1-4571-97d5-fcf0de552472 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.985038] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae1e3684-df39-4387-9d9a-7b0a2d5257aa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.015914] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-829bea0f-cf15-4922-ab90-59d600438fba {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.024335] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eca325d-a59c-440d-b3f1-1947c746f4a8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.040491] env[69328]: DEBUG nova.compute.provider_tree [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1007.150107] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "6ad357d9-c35a-4fdb-8dd0-39a0617bf85e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1007.150372] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "6ad357d9-c35a-4fdb-8dd0-39a0617bf85e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1007.188265] env[69328]: DEBUG nova.compute.manager [None req-be70f0a8-60ab-4579-aab1-4d42d40bbc27 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1007.188477] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-be70f0a8-60ab-4579-aab1-4d42d40bbc27 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1007.189369] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d65c185-4087-47db-9a56-c448a77bde24 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.197822] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-be70f0a8-60ab-4579-aab1-4d42d40bbc27 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1007.198108] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1d526716-fe04-449f-839f-62c033d046b8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.205994] env[69328]: DEBUG oslo_vmware.api [None req-be70f0a8-60ab-4579-aab1-4d42d40bbc27 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 1007.205994] env[69328]: value = "task-3273800" [ 1007.205994] env[69328]: _type = "Task" [ 1007.205994] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.215160] env[69328]: DEBUG oslo_vmware.api [None req-be70f0a8-60ab-4579-aab1-4d42d40bbc27 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273800, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.237317] env[69328]: DEBUG oslo_vmware.api [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273799, 'name': PowerOnVM_Task, 'duration_secs': 0.909277} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.237317] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1007.237317] env[69328]: INFO nova.compute.manager [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Took 9.16 seconds to spawn the instance on the hypervisor. 
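The destroy path traced above (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task, then network deallocation) maps onto a handful of vSphere calls made through the oslo.vmware session. A rough sketch, assuming session is an oslo_vmware.api.VMwareAPISession and vm_ref/datacenter_ref are already-resolved managed object references; the datastore path is a placeholder modeled on the log, not taken from it:

def destroy_sketch(session, vm_ref, datacenter_ref):
    # Power off the VM and wait for the vCenter task; the "_poll_task ...
    # progress is N%" lines in the log come from wait_for_task().
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

    # UnregisterVM is a plain call, not a task, so there is nothing to poll.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # Delete the instance directory from the datastore.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore1] INSTANCE_UUID', datacenter=datacenter_ref)
    session.wait_for_task(task)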
[ 1007.237317] env[69328]: DEBUG nova.compute.manager [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1007.238528] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-112801c8-0e05-4e93-829e-cb582276722d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.544602] env[69328]: DEBUG nova.scheduler.client.report [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1007.653495] env[69328]: DEBUG nova.compute.manager [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1007.716917] env[69328]: DEBUG oslo_vmware.api [None req-be70f0a8-60ab-4579-aab1-4d42d40bbc27 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273800, 'name': PowerOffVM_Task, 'duration_secs': 0.256439} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.718325] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-be70f0a8-60ab-4579-aab1-4d42d40bbc27 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1007.718325] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-be70f0a8-60ab-4579-aab1-4d42d40bbc27 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1007.718325] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-08a9f545-34b9-4aa6-bc67-8f7e25b0d475 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.769985] env[69328]: INFO nova.compute.manager [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Took 25.49 seconds to build instance. 
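The recurring "Task: {'id': task-NNNNNNN, ...} progress is N%" entries are emitted while oslo.vmware polls each vCenter task on a fixed interval until it reports success or error. A stripped-down sketch of that polling shape using oslo.service's looping call; get_task_state is a hypothetical callback standing in for the real property-collector read, not an oslo.vmware API:

from oslo_service import loopingcall

def wait_for_task_sketch(get_task_state, poll_interval=0.5):
    def _poll():
        state, progress = get_task_state()
        if state == 'success':
            raise loopingcall.LoopingCallDone(True)   # task finished
        if state == 'error':
            raise loopingcall.LoopingCallDone(False)  # task failed
        # Still queued/running: report progress and poll again.
        print('progress is %d%%' % progress)

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    # start() schedules _poll every poll_interval seconds; wait() blocks
    # until LoopingCallDone is raised and returns its value.
    return timer.start(interval=poll_interval).wait()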
[ 1008.052023] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.411s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1008.052579] env[69328]: DEBUG nova.compute.manager [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1008.055725] env[69328]: DEBUG oslo_concurrency.lockutils [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.323s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1008.056966] env[69328]: INFO nova.compute.claims [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1008.120922] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-be70f0a8-60ab-4579-aab1-4d42d40bbc27 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1008.121183] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-be70f0a8-60ab-4579-aab1-4d42d40bbc27 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1008.121369] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-be70f0a8-60ab-4579-aab1-4d42d40bbc27 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Deleting the datastore file [datastore1] b21ff3c9-d53a-4065-a271-682c2f1b895d {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1008.121641] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-33a6f9a7-3654-4773-b1f1-410abc5c45f4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.129656] env[69328]: DEBUG oslo_vmware.api [None req-be70f0a8-60ab-4579-aab1-4d42d40bbc27 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 1008.129656] env[69328]: value = "task-3273802" [ 1008.129656] env[69328]: _type = "Task" [ 1008.129656] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.137806] env[69328]: DEBUG oslo_vmware.api [None req-be70f0a8-60ab-4579-aab1-4d42d40bbc27 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273802, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.171968] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1008.272479] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f9583d69-ad48-4dd7-8c6e-4d609b173da7 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "a7d4893f-31d4-449d-96d5-a2a1377d8454" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.011s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1008.419153] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "ac0f967d-18c8-45d8-94ca-829a1fe11451" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1008.419391] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "ac0f967d-18c8-45d8-94ca-829a1fe11451" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1008.561519] env[69328]: DEBUG nova.compute.utils [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1008.565411] env[69328]: DEBUG nova.compute.manager [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1008.565594] env[69328]: DEBUG nova.network.neutron [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1008.606948] env[69328]: DEBUG nova.policy [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '69ca01fd1d0f42b0b05a5426da9753ad', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '30209bc93a4042488f15c73b7e4733d5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 1008.642728] env[69328]: DEBUG oslo_vmware.api [None req-be70f0a8-60ab-4579-aab1-4d42d40bbc27 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273802, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.924018] env[69328]: DEBUG nova.compute.manager [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1008.953766] env[69328]: DEBUG nova.network.neutron [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Successfully created port: 95776220-5fd9-42a1-8bf9-cfb9fe49d62d {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1009.066078] env[69328]: DEBUG nova.compute.manager [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1009.141582] env[69328]: DEBUG oslo_vmware.api [None req-be70f0a8-60ab-4579-aab1-4d42d40bbc27 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273802, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.568569} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.141889] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-be70f0a8-60ab-4579-aab1-4d42d40bbc27 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1009.142100] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-be70f0a8-60ab-4579-aab1-4d42d40bbc27 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1009.142288] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-be70f0a8-60ab-4579-aab1-4d42d40bbc27 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1009.142462] env[69328]: INFO nova.compute.manager [None req-be70f0a8-60ab-4579-aab1-4d42d40bbc27 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Took 1.95 seconds to destroy the instance on the hypervisor. [ 1009.142734] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-be70f0a8-60ab-4579-aab1-4d42d40bbc27 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1009.142957] env[69328]: DEBUG nova.compute.manager [-] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1009.143059] env[69328]: DEBUG nova.network.neutron [-] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1009.454662] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1009.494748] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51a9532e-c581-45d6-b6be-0c62b294993c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.500308] env[69328]: DEBUG nova.compute.manager [req-36c6185c-3b99-441c-b2c4-bcfa0f9688aa req-55731f35-cc9b-4cdc-b49e-ad2d07b29f64 service nova] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Received event network-vif-deleted-3a99454a-a8d4-4939-8e61-b21d121522f2 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1009.500521] env[69328]: INFO nova.compute.manager [req-36c6185c-3b99-441c-b2c4-bcfa0f9688aa req-55731f35-cc9b-4cdc-b49e-ad2d07b29f64 service nova] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Neutron deleted interface 
3a99454a-a8d4-4939-8e61-b21d121522f2; detaching it from the instance and deleting it from the info cache [ 1009.500695] env[69328]: DEBUG nova.network.neutron [req-36c6185c-3b99-441c-b2c4-bcfa0f9688aa req-55731f35-cc9b-4cdc-b49e-ad2d07b29f64 service nova] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1009.505831] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c04f4562-1f73-4698-acb3-00273032a10e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.542023] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b13b3838-275d-4dbe-99f8-03db67b1d379 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.551437] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50a6a6fd-3465-4871-8d67-c89bddd61f6d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.566791] env[69328]: DEBUG nova.compute.provider_tree [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1009.985256] env[69328]: DEBUG nova.network.neutron [-] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.005632] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d4eeb279-2119-46d4-8c39-df9cc3f10eeb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.016894] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95a36dc3-a5e8-4d56-a400-ae3c58e68679 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.051512] env[69328]: DEBUG nova.compute.manager [req-36c6185c-3b99-441c-b2c4-bcfa0f9688aa req-55731f35-cc9b-4cdc-b49e-ad2d07b29f64 service nova] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Detach interface failed, port_id=3a99454a-a8d4-4939-8e61-b21d121522f2, reason: Instance b21ff3c9-d53a-4065-a271-682c2f1b895d could not be found. 
{{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1010.070305] env[69328]: DEBUG nova.scheduler.client.report [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1010.078351] env[69328]: DEBUG nova.compute.manager [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1010.103487] env[69328]: DEBUG nova.virt.hardware [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1010.103692] env[69328]: DEBUG nova.virt.hardware [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1010.103843] env[69328]: DEBUG nova.virt.hardware [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1010.104050] env[69328]: DEBUG nova.virt.hardware [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1010.104237] env[69328]: DEBUG nova.virt.hardware [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 1010.104379] env[69328]: DEBUG nova.virt.hardware [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1010.104585] env[69328]: DEBUG nova.virt.hardware [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1010.104739] env[69328]: DEBUG nova.virt.hardware [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1010.104903] env[69328]: DEBUG nova.virt.hardware [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1010.105084] env[69328]: DEBUG nova.virt.hardware [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1010.105266] env[69328]: DEBUG nova.virt.hardware [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1010.106376] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90f6e6e4-09a4-4fcf-b511-c0c0c587bec8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.115227] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d67619e-3bde-4446-bbba-61a163091d82 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.314791] env[69328]: DEBUG nova.compute.manager [req-be8e4dca-b4dd-45c2-8b3b-8fe8b9709600 req-f19a6aab-89b8-40c4-ada0-fd772138852c service nova] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Received event network-vif-plugged-95776220-5fd9-42a1-8bf9-cfb9fe49d62d {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1010.315018] env[69328]: DEBUG oslo_concurrency.lockutils [req-be8e4dca-b4dd-45c2-8b3b-8fe8b9709600 req-f19a6aab-89b8-40c4-ada0-fd772138852c service nova] Acquiring lock "dc050589-e37a-4798-9532-df4ecfab7eb1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1010.315306] env[69328]: DEBUG 
oslo_concurrency.lockutils [req-be8e4dca-b4dd-45c2-8b3b-8fe8b9709600 req-f19a6aab-89b8-40c4-ada0-fd772138852c service nova] Lock "dc050589-e37a-4798-9532-df4ecfab7eb1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1010.315493] env[69328]: DEBUG oslo_concurrency.lockutils [req-be8e4dca-b4dd-45c2-8b3b-8fe8b9709600 req-f19a6aab-89b8-40c4-ada0-fd772138852c service nova] Lock "dc050589-e37a-4798-9532-df4ecfab7eb1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1010.315664] env[69328]: DEBUG nova.compute.manager [req-be8e4dca-b4dd-45c2-8b3b-8fe8b9709600 req-f19a6aab-89b8-40c4-ada0-fd772138852c service nova] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] No waiting events found dispatching network-vif-plugged-95776220-5fd9-42a1-8bf9-cfb9fe49d62d {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1010.315829] env[69328]: WARNING nova.compute.manager [req-be8e4dca-b4dd-45c2-8b3b-8fe8b9709600 req-f19a6aab-89b8-40c4-ada0-fd772138852c service nova] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Received unexpected event network-vif-plugged-95776220-5fd9-42a1-8bf9-cfb9fe49d62d for instance with vm_state building and task_state spawning. [ 1010.423415] env[69328]: DEBUG nova.network.neutron [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Successfully updated port: 95776220-5fd9-42a1-8bf9-cfb9fe49d62d {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1010.487972] env[69328]: INFO nova.compute.manager [-] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Took 1.34 seconds to deallocate network for instance. [ 1010.575542] env[69328]: DEBUG oslo_concurrency.lockutils [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.520s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1010.576295] env[69328]: DEBUG nova.compute.manager [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1010.579043] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.628s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1010.580558] env[69328]: INFO nova.compute.claims [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1010.925982] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "refresh_cache-dc050589-e37a-4798-9532-df4ecfab7eb1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1010.926237] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquired lock "refresh_cache-dc050589-e37a-4798-9532-df4ecfab7eb1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1010.926395] env[69328]: DEBUG nova.network.neutron [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1010.997036] env[69328]: DEBUG oslo_concurrency.lockutils [None req-be70f0a8-60ab-4579-aab1-4d42d40bbc27 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1011.087745] env[69328]: DEBUG nova.compute.utils [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1011.091531] env[69328]: DEBUG nova.compute.manager [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1011.091531] env[69328]: DEBUG nova.network.neutron [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1011.133572] env[69328]: DEBUG nova.policy [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e5f3619c5c0a41b0a6e7096c0a568c50', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1f5cd4dfb0b54081aba7bf8620521193', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 1011.385146] env[69328]: DEBUG nova.network.neutron [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Successfully created port: e0c14c41-b680-40a2-a769-2b4191814a41 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1011.457965] env[69328]: DEBUG nova.network.neutron [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1011.594387] env[69328]: DEBUG nova.compute.manager [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Start building block device mappings for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1011.667987] env[69328]: DEBUG nova.network.neutron [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Updating instance_info_cache with network_info: [{"id": "95776220-5fd9-42a1-8bf9-cfb9fe49d62d", "address": "fa:16:3e:0c:f4:26", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95776220-5f", "ovs_interfaceid": "95776220-5fd9-42a1-8bf9-cfb9fe49d62d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.025686] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac72512f-f802-4618-b429-df870e95a8f9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.036533] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94cba4a3-23da-4215-82cf-2d86a6c7a944 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.070891] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96a922a8-1000-4285-b699-317baf05cff7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.079610] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-684c566d-dcc6-458e-b343-9b3199f04248 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.093723] env[69328]: DEBUG nova.compute.provider_tree [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1012.173306] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Releasing lock "refresh_cache-dc050589-e37a-4798-9532-df4ecfab7eb1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1012.173641] env[69328]: 
DEBUG nova.compute.manager [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Instance network_info: |[{"id": "95776220-5fd9-42a1-8bf9-cfb9fe49d62d", "address": "fa:16:3e:0c:f4:26", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95776220-5f", "ovs_interfaceid": "95776220-5fd9-42a1-8bf9-cfb9fe49d62d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1012.174409] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:f4:26', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'de7fa486-5f28-44ae-b0cf-72234ff87546', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '95776220-5fd9-42a1-8bf9-cfb9fe49d62d', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1012.182227] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1012.182455] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1012.182718] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-92bcfcfc-df32-4499-833a-759c3a4fe782 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.203440] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1012.203440] env[69328]: value = "task-3273803" [ 1012.203440] env[69328]: _type = "Task" [ 1012.203440] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.212158] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273803, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.342858] env[69328]: DEBUG nova.compute.manager [req-889a5fec-296a-4989-89d9-18c9f5454ce1 req-61369d13-a6c8-4f61-b5ab-7e950ecb1d4b service nova] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Received event network-changed-95776220-5fd9-42a1-8bf9-cfb9fe49d62d {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1012.343202] env[69328]: DEBUG nova.compute.manager [req-889a5fec-296a-4989-89d9-18c9f5454ce1 req-61369d13-a6c8-4f61-b5ab-7e950ecb1d4b service nova] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Refreshing instance network info cache due to event network-changed-95776220-5fd9-42a1-8bf9-cfb9fe49d62d. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1012.343448] env[69328]: DEBUG oslo_concurrency.lockutils [req-889a5fec-296a-4989-89d9-18c9f5454ce1 req-61369d13-a6c8-4f61-b5ab-7e950ecb1d4b service nova] Acquiring lock "refresh_cache-dc050589-e37a-4798-9532-df4ecfab7eb1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.343592] env[69328]: DEBUG oslo_concurrency.lockutils [req-889a5fec-296a-4989-89d9-18c9f5454ce1 req-61369d13-a6c8-4f61-b5ab-7e950ecb1d4b service nova] Acquired lock "refresh_cache-dc050589-e37a-4798-9532-df4ecfab7eb1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1012.343752] env[69328]: DEBUG nova.network.neutron [req-889a5fec-296a-4989-89d9-18c9f5454ce1 req-61369d13-a6c8-4f61-b5ab-7e950ecb1d4b service nova] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Refreshing network info cache for port 95776220-5fd9-42a1-8bf9-cfb9fe49d62d {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1012.597422] env[69328]: DEBUG nova.scheduler.client.report [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1012.608032] env[69328]: DEBUG nova.compute.manager [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1012.635344] env[69328]: DEBUG nova.virt.hardware [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1012.635587] env[69328]: DEBUG nova.virt.hardware [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1012.635745] env[69328]: DEBUG nova.virt.hardware [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1012.635948] env[69328]: DEBUG nova.virt.hardware [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1012.636123] env[69328]: DEBUG nova.virt.hardware [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1012.636283] env[69328]: DEBUG nova.virt.hardware [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1012.636498] env[69328]: DEBUG nova.virt.hardware [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1012.636659] env[69328]: DEBUG nova.virt.hardware [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1012.636823] 
env[69328]: DEBUG nova.virt.hardware [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1012.636988] env[69328]: DEBUG nova.virt.hardware [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1012.637181] env[69328]: DEBUG nova.virt.hardware [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1012.638078] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5276ba4-4007-41d6-a81a-939d2ce09dbd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.646479] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57c2307e-7943-4c9a-b804-24bd142eae4a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.713794] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273803, 'name': CreateVM_Task, 'duration_secs': 0.336908} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.714275] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1012.715123] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.715460] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1012.715892] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1012.716279] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69a512b7-99fc-4454-95bf-618fc19080a9 {{(pid=69328) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.721295] env[69328]: DEBUG oslo_vmware.api [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 1012.721295] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52abb3c0-fd30-75d4-6fb9-0a0bd1ccadc7" [ 1012.721295] env[69328]: _type = "Task" [ 1012.721295] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.730342] env[69328]: DEBUG oslo_vmware.api [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52abb3c0-fd30-75d4-6fb9-0a0bd1ccadc7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.974055] env[69328]: DEBUG nova.network.neutron [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Successfully updated port: e0c14c41-b680-40a2-a769-2b4191814a41 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1013.102191] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.523s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1013.102748] env[69328]: DEBUG nova.compute.manager [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1013.109039] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.752s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1013.109039] env[69328]: INFO nova.compute.claims [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1013.133656] env[69328]: DEBUG nova.network.neutron [req-889a5fec-296a-4989-89d9-18c9f5454ce1 req-61369d13-a6c8-4f61-b5ab-7e950ecb1d4b service nova] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Updated VIF entry in instance network info cache for port 95776220-5fd9-42a1-8bf9-cfb9fe49d62d. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1013.134150] env[69328]: DEBUG nova.network.neutron [req-889a5fec-296a-4989-89d9-18c9f5454ce1 req-61369d13-a6c8-4f61-b5ab-7e950ecb1d4b service nova] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Updating instance_info_cache with network_info: [{"id": "95776220-5fd9-42a1-8bf9-cfb9fe49d62d", "address": "fa:16:3e:0c:f4:26", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95776220-5f", "ovs_interfaceid": "95776220-5fd9-42a1-8bf9-cfb9fe49d62d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.231802] env[69328]: DEBUG oslo_vmware.api [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52abb3c0-fd30-75d4-6fb9-0a0bd1ccadc7, 'name': SearchDatastore_Task, 'duration_secs': 0.01097} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.232129] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1013.232367] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1013.232602] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1013.232760] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1013.232961] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1013.233237] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fb4335cd-0bac-4c2f-8083-c0810b57b662 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.241542] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1013.241729] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1013.242452] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12f4d632-4b47-4bae-869c-1cd7c32e9994 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.247514] env[69328]: DEBUG oslo_vmware.api [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 1013.247514] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ba3e45-52fb-65e4-52c9-7410749dfa3a" [ 1013.247514] env[69328]: _type = "Task" [ 1013.247514] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.255506] env[69328]: DEBUG oslo_vmware.api [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ba3e45-52fb-65e4-52c9-7410749dfa3a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.480879] env[69328]: DEBUG oslo_concurrency.lockutils [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Acquiring lock "refresh_cache-dd43adb3-b073-483a-81dd-69df7f746874" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1013.481155] env[69328]: DEBUG oslo_concurrency.lockutils [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Acquired lock "refresh_cache-dd43adb3-b073-483a-81dd-69df7f746874" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1013.481468] env[69328]: DEBUG nova.network.neutron [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1013.609217] env[69328]: DEBUG nova.compute.utils [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1013.609886] env[69328]: DEBUG nova.compute.manager [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1013.610391] env[69328]: DEBUG nova.network.neutron [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1013.637460] env[69328]: DEBUG oslo_concurrency.lockutils [req-889a5fec-296a-4989-89d9-18c9f5454ce1 req-61369d13-a6c8-4f61-b5ab-7e950ecb1d4b service nova] Releasing lock "refresh_cache-dc050589-e37a-4798-9532-df4ecfab7eb1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1013.650293] env[69328]: DEBUG nova.policy [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e5f3619c5c0a41b0a6e7096c0a568c50', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1f5cd4dfb0b54081aba7bf8620521193', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 1013.758374] env[69328]: DEBUG oslo_vmware.api [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ba3e45-52fb-65e4-52c9-7410749dfa3a, 'name': SearchDatastore_Task, 'duration_secs': 0.009184} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.759210] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a237c23e-c875-45ad-9c97-f250b94b25d1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.765489] env[69328]: DEBUG oslo_vmware.api [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 1013.765489] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d192ae-56e8-ee02-9885-fb2ad62778b8" [ 1013.765489] env[69328]: _type = "Task" [ 1013.765489] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.773402] env[69328]: DEBUG oslo_vmware.api [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d192ae-56e8-ee02-9885-fb2ad62778b8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.931930] env[69328]: DEBUG nova.network.neutron [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Successfully created port: 51df3c9d-fc9a-47c4-83a8-917ec6fedbbf {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1014.033089] env[69328]: DEBUG nova.network.neutron [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1014.112973] env[69328]: DEBUG nova.compute.manager [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1014.226815] env[69328]: DEBUG nova.network.neutron [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Updating instance_info_cache with network_info: [{"id": "e0c14c41-b680-40a2-a769-2b4191814a41", "address": "fa:16:3e:1b:39:92", "network": {"id": "bd599844-1a77-4107-a344-7b4ffd53ea20", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1763799815-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f5cd4dfb0b54081aba7bf8620521193", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0c14c41-b6", "ovs_interfaceid": "e0c14c41-b680-40a2-a769-2b4191814a41", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1014.282717] env[69328]: DEBUG oslo_vmware.api [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d192ae-56e8-ee02-9885-fb2ad62778b8, 'name': SearchDatastore_Task, 'duration_secs': 0.010027} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.283185] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1014.283460] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] dc050589-e37a-4798-9532-df4ecfab7eb1/dc050589-e37a-4798-9532-df4ecfab7eb1.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1014.283733] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3cf0e156-5a17-4279-a084-945a7b810526 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.292975] env[69328]: DEBUG oslo_vmware.api [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 1014.292975] env[69328]: value = "task-3273804" [ 1014.292975] env[69328]: _type = "Task" [ 1014.292975] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.308593] env[69328]: DEBUG oslo_vmware.api [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273804, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.369096] env[69328]: DEBUG nova.compute.manager [req-ddf2f6e8-0498-400c-ad4b-96b15a922a8f req-919aeb38-56e7-491b-9a4b-255b05a746e0 service nova] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Received event network-vif-plugged-e0c14c41-b680-40a2-a769-2b4191814a41 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1014.369317] env[69328]: DEBUG oslo_concurrency.lockutils [req-ddf2f6e8-0498-400c-ad4b-96b15a922a8f req-919aeb38-56e7-491b-9a4b-255b05a746e0 service nova] Acquiring lock "dd43adb3-b073-483a-81dd-69df7f746874-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1014.369661] env[69328]: DEBUG oslo_concurrency.lockutils [req-ddf2f6e8-0498-400c-ad4b-96b15a922a8f req-919aeb38-56e7-491b-9a4b-255b05a746e0 service nova] Lock "dd43adb3-b073-483a-81dd-69df7f746874-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1014.369745] env[69328]: DEBUG oslo_concurrency.lockutils [req-ddf2f6e8-0498-400c-ad4b-96b15a922a8f req-919aeb38-56e7-491b-9a4b-255b05a746e0 service nova] Lock "dd43adb3-b073-483a-81dd-69df7f746874-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1014.369912] env[69328]: DEBUG nova.compute.manager [req-ddf2f6e8-0498-400c-ad4b-96b15a922a8f req-919aeb38-56e7-491b-9a4b-255b05a746e0 service nova] [instance: dd43adb3-b073-483a-81dd-69df7f746874] No waiting events found dispatching network-vif-plugged-e0c14c41-b680-40a2-a769-2b4191814a41 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1014.370287] env[69328]: WARNING nova.compute.manager [req-ddf2f6e8-0498-400c-ad4b-96b15a922a8f req-919aeb38-56e7-491b-9a4b-255b05a746e0 service nova] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Received unexpected event network-vif-plugged-e0c14c41-b680-40a2-a769-2b4191814a41 for instance with vm_state building and task_state spawning. [ 1014.370287] env[69328]: DEBUG nova.compute.manager [req-ddf2f6e8-0498-400c-ad4b-96b15a922a8f req-919aeb38-56e7-491b-9a4b-255b05a746e0 service nova] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Received event network-changed-e0c14c41-b680-40a2-a769-2b4191814a41 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1014.370468] env[69328]: DEBUG nova.compute.manager [req-ddf2f6e8-0498-400c-ad4b-96b15a922a8f req-919aeb38-56e7-491b-9a4b-255b05a746e0 service nova] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Refreshing instance network info cache due to event network-changed-e0c14c41-b680-40a2-a769-2b4191814a41. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1014.370647] env[69328]: DEBUG oslo_concurrency.lockutils [req-ddf2f6e8-0498-400c-ad4b-96b15a922a8f req-919aeb38-56e7-491b-9a4b-255b05a746e0 service nova] Acquiring lock "refresh_cache-dd43adb3-b073-483a-81dd-69df7f746874" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1014.517151] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d9960c3-4910-4f74-b9fb-a6750df6db70 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.528320] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7ae8c6b-a193-498b-ab53-f796dc26253c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.557863] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e786bb72-d572-405e-b56c-f62102828049 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.565245] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33337876-1e7d-49c8-b10b-7f0f2b600ce9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.578083] env[69328]: DEBUG nova.compute.provider_tree [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1014.733249] env[69328]: DEBUG oslo_concurrency.lockutils [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Releasing lock "refresh_cache-dd43adb3-b073-483a-81dd-69df7f746874" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1014.733737] env[69328]: DEBUG nova.compute.manager [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Instance network_info: |[{"id": "e0c14c41-b680-40a2-a769-2b4191814a41", "address": "fa:16:3e:1b:39:92", "network": {"id": "bd599844-1a77-4107-a344-7b4ffd53ea20", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1763799815-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f5cd4dfb0b54081aba7bf8620521193", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0c14c41-b6", "ovs_interfaceid": 
"e0c14c41-b680-40a2-a769-2b4191814a41", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1014.734099] env[69328]: DEBUG oslo_concurrency.lockutils [req-ddf2f6e8-0498-400c-ad4b-96b15a922a8f req-919aeb38-56e7-491b-9a4b-255b05a746e0 service nova] Acquired lock "refresh_cache-dd43adb3-b073-483a-81dd-69df7f746874" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1014.734315] env[69328]: DEBUG nova.network.neutron [req-ddf2f6e8-0498-400c-ad4b-96b15a922a8f req-919aeb38-56e7-491b-9a4b-255b05a746e0 service nova] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Refreshing network info cache for port e0c14c41-b680-40a2-a769-2b4191814a41 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1014.735902] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1b:39:92', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7b83383f-ed7a-4efd-aef7-aa8c15649d07', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e0c14c41-b680-40a2-a769-2b4191814a41', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1014.744538] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Creating folder: Project (1f5cd4dfb0b54081aba7bf8620521193). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1014.748153] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-464c7032-85f5-4797-bdb5-502986966be3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.764833] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Created folder: Project (1f5cd4dfb0b54081aba7bf8620521193) in parent group-v653649. [ 1014.765147] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Creating folder: Instances. Parent ref: group-v653899. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1014.765413] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-580a1ec6-5ef8-446f-9a75-ea4a3d5bdb50 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.781778] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Created folder: Instances in parent group-v653899. 
[ 1014.782100] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1014.784788] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1014.785428] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cfe2d3f8-36c9-4995-86db-b4b9450934fa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.810431] env[69328]: DEBUG oslo_vmware.api [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273804, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.812070] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1014.812070] env[69328]: value = "task-3273807" [ 1014.812070] env[69328]: _type = "Task" [ 1014.812070] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.823922] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273807, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.997055] env[69328]: DEBUG nova.network.neutron [req-ddf2f6e8-0498-400c-ad4b-96b15a922a8f req-919aeb38-56e7-491b-9a4b-255b05a746e0 service nova] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Updated VIF entry in instance network info cache for port e0c14c41-b680-40a2-a769-2b4191814a41. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1014.997457] env[69328]: DEBUG nova.network.neutron [req-ddf2f6e8-0498-400c-ad4b-96b15a922a8f req-919aeb38-56e7-491b-9a4b-255b05a746e0 service nova] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Updating instance_info_cache with network_info: [{"id": "e0c14c41-b680-40a2-a769-2b4191814a41", "address": "fa:16:3e:1b:39:92", "network": {"id": "bd599844-1a77-4107-a344-7b4ffd53ea20", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1763799815-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f5cd4dfb0b54081aba7bf8620521193", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0c14c41-b6", "ovs_interfaceid": "e0c14c41-b680-40a2-a769-2b4191814a41", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1015.081315] env[69328]: DEBUG nova.scheduler.client.report [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1015.122121] env[69328]: DEBUG nova.compute.manager [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1015.149435] env[69328]: DEBUG nova.virt.hardware [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1015.149836] env[69328]: DEBUG nova.virt.hardware [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1015.150118] env[69328]: DEBUG nova.virt.hardware [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1015.150426] env[69328]: DEBUG nova.virt.hardware [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1015.150667] env[69328]: DEBUG nova.virt.hardware [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1015.150950] env[69328]: DEBUG nova.virt.hardware [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1015.151365] env[69328]: DEBUG nova.virt.hardware [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1015.151678] env[69328]: DEBUG nova.virt.hardware [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1015.151995] 
env[69328]: DEBUG nova.virt.hardware [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1015.152347] env[69328]: DEBUG nova.virt.hardware [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1015.152670] env[69328]: DEBUG nova.virt.hardware [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1015.154568] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09dc9c7c-fe17-4afa-b5d0-709bb7acfb82 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.163954] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89a6db46-f19c-46e8-b818-aae87243b475 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.311691] env[69328]: DEBUG oslo_vmware.api [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273804, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.851674} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.311919] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] dc050589-e37a-4798-9532-df4ecfab7eb1/dc050589-e37a-4798-9532-df4ecfab7eb1.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1015.312137] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1015.312431] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fce711be-d6c6-4caa-837a-f1edee1845fa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.323158] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273807, 'name': CreateVM_Task, 'duration_secs': 0.454389} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.324177] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1015.324490] env[69328]: DEBUG oslo_vmware.api [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 1015.324490] env[69328]: value = "task-3273808" [ 1015.324490] env[69328]: _type = "Task" [ 1015.324490] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.325100] env[69328]: DEBUG oslo_concurrency.lockutils [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.325266] env[69328]: DEBUG oslo_concurrency.lockutils [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1015.325579] env[69328]: DEBUG oslo_concurrency.lockutils [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1015.325867] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8efae157-9abe-404c-8a86-fba7abccb7fc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.336266] env[69328]: DEBUG oslo_vmware.api [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273808, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.337465] env[69328]: DEBUG oslo_vmware.api [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Waiting for the task: (returnval){ [ 1015.337465] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ace57d-47c0-6f70-829f-320cb50c0015" [ 1015.337465] env[69328]: _type = "Task" [ 1015.337465] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.346545] env[69328]: DEBUG oslo_vmware.api [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ace57d-47c0-6f70-829f-320cb50c0015, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.421034] env[69328]: DEBUG nova.compute.manager [req-d7a61ae8-ae97-4d49-9e8e-6e520efaa331 req-f85163f5-8059-4c6a-ba2a-e34901a5f97a service nova] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Received event network-vif-plugged-51df3c9d-fc9a-47c4-83a8-917ec6fedbbf {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1015.421225] env[69328]: DEBUG oslo_concurrency.lockutils [req-d7a61ae8-ae97-4d49-9e8e-6e520efaa331 req-f85163f5-8059-4c6a-ba2a-e34901a5f97a service nova] Acquiring lock "6b9757de-a274-4f4d-9b73-cc2ca92b4732-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1015.421469] env[69328]: DEBUG oslo_concurrency.lockutils [req-d7a61ae8-ae97-4d49-9e8e-6e520efaa331 req-f85163f5-8059-4c6a-ba2a-e34901a5f97a service nova] Lock "6b9757de-a274-4f4d-9b73-cc2ca92b4732-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1015.421650] env[69328]: DEBUG oslo_concurrency.lockutils [req-d7a61ae8-ae97-4d49-9e8e-6e520efaa331 req-f85163f5-8059-4c6a-ba2a-e34901a5f97a service nova] Lock "6b9757de-a274-4f4d-9b73-cc2ca92b4732-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1015.421867] env[69328]: DEBUG nova.compute.manager [req-d7a61ae8-ae97-4d49-9e8e-6e520efaa331 req-f85163f5-8059-4c6a-ba2a-e34901a5f97a service nova] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] No waiting events found dispatching network-vif-plugged-51df3c9d-fc9a-47c4-83a8-917ec6fedbbf {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1015.422194] env[69328]: WARNING nova.compute.manager [req-d7a61ae8-ae97-4d49-9e8e-6e520efaa331 req-f85163f5-8059-4c6a-ba2a-e34901a5f97a service nova] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Received unexpected event network-vif-plugged-51df3c9d-fc9a-47c4-83a8-917ec6fedbbf for instance with vm_state building and task_state spawning. 
[ 1015.501297] env[69328]: DEBUG nova.network.neutron [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Successfully updated port: 51df3c9d-fc9a-47c4-83a8-917ec6fedbbf {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1015.501297] env[69328]: DEBUG oslo_concurrency.lockutils [req-ddf2f6e8-0498-400c-ad4b-96b15a922a8f req-919aeb38-56e7-491b-9a4b-255b05a746e0 service nova] Releasing lock "refresh_cache-dd43adb3-b073-483a-81dd-69df7f746874" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1015.586694] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.481s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1015.587230] env[69328]: DEBUG nova.compute.manager [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1015.590244] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.057s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1015.590413] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1015.592602] env[69328]: DEBUG oslo_concurrency.lockutils [None req-dda2c9ea-ab23-4526-ba73-3282180787b9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 13.909s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1015.835493] env[69328]: DEBUG oslo_vmware.api [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273808, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066188} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.835904] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1015.836558] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29f0b66e-5266-49d3-ad1f-bbd72407329b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.855211] env[69328]: DEBUG oslo_vmware.api [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ace57d-47c0-6f70-829f-320cb50c0015, 'name': SearchDatastore_Task, 'duration_secs': 0.013254} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.863588] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] dc050589-e37a-4798-9532-df4ecfab7eb1/dc050589-e37a-4798-9532-df4ecfab7eb1.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1015.863899] env[69328]: DEBUG oslo_concurrency.lockutils [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1015.864136] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1015.864371] env[69328]: DEBUG oslo_concurrency.lockutils [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.864516] env[69328]: DEBUG oslo_concurrency.lockutils [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1015.864689] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1015.864917] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-40590c78-dfe2-485a-b442-2825dc226ef1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.878368] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4eb76aed-9481-40be-89d9-2c3ea7ccf521 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.889511] env[69328]: DEBUG oslo_vmware.api [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 1015.889511] env[69328]: value = "task-3273809" [ 1015.889511] env[69328]: _type = "Task" [ 1015.889511] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.893719] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1015.893898] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1015.894934] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e120ed9-e27d-4a09-bbdd-e74c4ea9de68 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.900164] env[69328]: DEBUG oslo_vmware.api [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273809, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.903270] env[69328]: DEBUG oslo_vmware.api [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Waiting for the task: (returnval){ [ 1015.903270] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52658409-0356-febc-481b-90608869b025" [ 1015.903270] env[69328]: _type = "Task" [ 1015.903270] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.911606] env[69328]: DEBUG oslo_vmware.api [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52658409-0356-febc-481b-90608869b025, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.003971] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Acquiring lock "refresh_cache-6b9757de-a274-4f4d-9b73-cc2ca92b4732" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1016.004177] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Acquired lock "refresh_cache-6b9757de-a274-4f4d-9b73-cc2ca92b4732" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1016.004348] env[69328]: DEBUG nova.network.neutron [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1016.096702] env[69328]: DEBUG nova.compute.utils [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1016.101778] env[69328]: DEBUG nova.compute.manager [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1016.101967] env[69328]: DEBUG nova.network.neutron [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1016.104883] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fe352a88-e6f7-41ef-a8e1-163836014156 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Lock "a0952fdf-5570-4112-bc4d-e9f9cee1599c" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 37.298s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1016.106025] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Lock "a0952fdf-5570-4112-bc4d-e9f9cee1599c" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 12.589s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1016.106214] env[69328]: INFO nova.compute.manager [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Unshelving [ 1016.173176] env[69328]: DEBUG nova.policy [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e5f3619c5c0a41b0a6e7096c0a568c50', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1f5cd4dfb0b54081aba7bf8620521193', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 1016.405297] env[69328]: DEBUG oslo_vmware.api [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273809, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.416097] env[69328]: DEBUG oslo_vmware.api [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52658409-0356-febc-481b-90608869b025, 'name': SearchDatastore_Task, 'duration_secs': 0.010319} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.416944] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6a6ab78-04d6-4549-aabe-4f1dc7581b24 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.423090] env[69328]: DEBUG oslo_vmware.api [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Waiting for the task: (returnval){ [ 1016.423090] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]521b6908-1c0e-3560-058b-dd06133e36f4" [ 1016.423090] env[69328]: _type = "Task" [ 1016.423090] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.437393] env[69328]: DEBUG oslo_vmware.api [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]521b6908-1c0e-3560-058b-dd06133e36f4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.496935] env[69328]: DEBUG nova.network.neutron [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Successfully created port: 5fae6e5f-3223-4872-83ba-b127e3c15d40 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1016.529323] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cbc66f4-f0bb-4eea-b757-5d5749e81e5b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.538563] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f547ac83-f536-409d-a75e-9f0fd658b002 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.541857] env[69328]: DEBUG nova.network.neutron [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1016.575978] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac9dbc4b-001d-45d7-a8ae-7f1f4b81b32f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.584995] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-918478de-9f8b-4cc7-a725-a1fcab0b5825 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.599163] env[69328]: DEBUG nova.compute.provider_tree [None req-dda2c9ea-ab23-4526-ba73-3282180787b9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1016.605610] env[69328]: DEBUG nova.compute.manager [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1016.750419] env[69328]: DEBUG nova.network.neutron [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Updating instance_info_cache with network_info: [{"id": "51df3c9d-fc9a-47c4-83a8-917ec6fedbbf", "address": "fa:16:3e:d9:09:00", "network": {"id": "bd599844-1a77-4107-a344-7b4ffd53ea20", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1763799815-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f5cd4dfb0b54081aba7bf8620521193", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51df3c9d-fc", "ovs_interfaceid": "51df3c9d-fc9a-47c4-83a8-917ec6fedbbf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1016.904094] env[69328]: DEBUG oslo_vmware.api [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273809, 'name': ReconfigVM_Task, 'duration_secs': 0.73915} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.904094] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Reconfigured VM instance instance-0000005c to attach disk [datastore1] dc050589-e37a-4798-9532-df4ecfab7eb1/dc050589-e37a-4798-9532-df4ecfab7eb1.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1016.904094] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9d4d3527-b077-4cd8-977e-144c07cc5ebe {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.911227] env[69328]: DEBUG oslo_vmware.api [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 1016.911227] env[69328]: value = "task-3273810" [ 1016.911227] env[69328]: _type = "Task" [ 1016.911227] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.920148] env[69328]: DEBUG oslo_vmware.api [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273810, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.933013] env[69328]: DEBUG oslo_vmware.api [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]521b6908-1c0e-3560-058b-dd06133e36f4, 'name': SearchDatastore_Task, 'duration_secs': 0.054039} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.933290] env[69328]: DEBUG oslo_concurrency.lockutils [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1016.933552] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] dd43adb3-b073-483a-81dd-69df7f746874/dd43adb3-b073-483a-81dd-69df7f746874.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1016.933803] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-528080a5-fbbd-4f33-b778-b07f7168b87d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.940353] env[69328]: DEBUG oslo_vmware.api [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Waiting for the task: (returnval){ [ 1016.940353] env[69328]: value = "task-3273811" [ 1016.940353] env[69328]: _type = "Task" [ 1016.940353] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.947917] env[69328]: DEBUG oslo_vmware.api [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273811, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.103425] env[69328]: DEBUG nova.scheduler.client.report [None req-dda2c9ea-ab23-4526-ba73-3282180787b9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1017.132101] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1017.253793] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Releasing lock "refresh_cache-6b9757de-a274-4f4d-9b73-cc2ca92b4732" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1017.254179] env[69328]: DEBUG nova.compute.manager [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Instance network_info: |[{"id": "51df3c9d-fc9a-47c4-83a8-917ec6fedbbf", "address": "fa:16:3e:d9:09:00", "network": {"id": "bd599844-1a77-4107-a344-7b4ffd53ea20", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1763799815-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f5cd4dfb0b54081aba7bf8620521193", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51df3c9d-fc", "ovs_interfaceid": "51df3c9d-fc9a-47c4-83a8-917ec6fedbbf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1017.254734] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d9:09:00', 
'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7b83383f-ed7a-4efd-aef7-aa8c15649d07', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '51df3c9d-fc9a-47c4-83a8-917ec6fedbbf', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1017.263416] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1017.263741] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1017.264069] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-453ca491-866f-4da7-bc05-4ce75f463f0e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.291897] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1017.291897] env[69328]: value = "task-3273812" [ 1017.291897] env[69328]: _type = "Task" [ 1017.291897] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.302034] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273812, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.423084] env[69328]: DEBUG oslo_vmware.api [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273810, 'name': Rename_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.450312] env[69328]: DEBUG oslo_vmware.api [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273811, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.488137} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.450587] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] dd43adb3-b073-483a-81dd-69df7f746874/dd43adb3-b073-483a-81dd-69df7f746874.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1017.450807] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1017.451075] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6ac42573-9861-46ee-a5a5-baafa1ca7739 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.458472] env[69328]: DEBUG oslo_vmware.api [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Waiting for the task: (returnval){ [ 1017.458472] env[69328]: value = "task-3273813" [ 1017.458472] env[69328]: _type = "Task" [ 1017.458472] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.463844] env[69328]: DEBUG nova.compute.manager [req-797c7690-17d4-4542-a446-775e291cf6fb req-e71ddad3-d4b5-4de3-ab39-998e5b757a54 service nova] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Received event network-changed-51df3c9d-fc9a-47c4-83a8-917ec6fedbbf {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1017.464043] env[69328]: DEBUG nova.compute.manager [req-797c7690-17d4-4542-a446-775e291cf6fb req-e71ddad3-d4b5-4de3-ab39-998e5b757a54 service nova] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Refreshing instance network info cache due to event network-changed-51df3c9d-fc9a-47c4-83a8-917ec6fedbbf. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1017.464266] env[69328]: DEBUG oslo_concurrency.lockutils [req-797c7690-17d4-4542-a446-775e291cf6fb req-e71ddad3-d4b5-4de3-ab39-998e5b757a54 service nova] Acquiring lock "refresh_cache-6b9757de-a274-4f4d-9b73-cc2ca92b4732" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.464410] env[69328]: DEBUG oslo_concurrency.lockutils [req-797c7690-17d4-4542-a446-775e291cf6fb req-e71ddad3-d4b5-4de3-ab39-998e5b757a54 service nova] Acquired lock "refresh_cache-6b9757de-a274-4f4d-9b73-cc2ca92b4732" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1017.464571] env[69328]: DEBUG nova.network.neutron [req-797c7690-17d4-4542-a446-775e291cf6fb req-e71ddad3-d4b5-4de3-ab39-998e5b757a54 service nova] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Refreshing network info cache for port 51df3c9d-fc9a-47c4-83a8-917ec6fedbbf {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1017.471274] env[69328]: DEBUG oslo_vmware.api [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273813, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.616235] env[69328]: DEBUG nova.compute.manager [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1017.645383] env[69328]: DEBUG nova.virt.hardware [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:34:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1017.645743] env[69328]: DEBUG nova.virt.hardware [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1017.646484] env[69328]: DEBUG nova.virt.hardware [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1017.646484] env[69328]: DEBUG nova.virt.hardware [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1017.646484] env[69328]: DEBUG nova.virt.hardware [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1017.646717] env[69328]: DEBUG nova.virt.hardware [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1017.646938] env[69328]: DEBUG nova.virt.hardware [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1017.647388] env[69328]: DEBUG nova.virt.hardware [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1017.647521] 
env[69328]: DEBUG nova.virt.hardware [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1017.647799] env[69328]: DEBUG nova.virt.hardware [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1017.648039] env[69328]: DEBUG nova.virt.hardware [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1017.649785] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b7f9051-f698-4b05-85c9-ed573dc66e54 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.658149] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed8994d3-2a94-4be3-b6e1-c9ce8f269b04 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.803078] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273812, 'name': CreateVM_Task, 'duration_secs': 0.410122} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.803321] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1017.803923] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.804365] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1017.804439] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1017.804784] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c97452f8-7ab6-4b7b-9e05-9e49f2738fe5 {{(pid=69328) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.809318] env[69328]: DEBUG oslo_vmware.api [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Waiting for the task: (returnval){ [ 1017.809318] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]524e318a-16b8-20f8-2d6a-9c8d15382989" [ 1017.809318] env[69328]: _type = "Task" [ 1017.809318] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.817343] env[69328]: DEBUG oslo_vmware.api [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]524e318a-16b8-20f8-2d6a-9c8d15382989, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.922552] env[69328]: DEBUG oslo_vmware.api [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273810, 'name': Rename_Task, 'duration_secs': 0.781314} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.922910] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1017.923229] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7ac61a27-4074-49fb-b452-d9cb0a9491c4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.930616] env[69328]: DEBUG oslo_vmware.api [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 1017.930616] env[69328]: value = "task-3273814" [ 1017.930616] env[69328]: _type = "Task" [ 1017.930616] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.938590] env[69328]: DEBUG oslo_vmware.api [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273814, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.967870] env[69328]: DEBUG oslo_vmware.api [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273813, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062673} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.968182] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1017.971152] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9df8fb42-5932-497b-9536-d9b4ce0518e9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.993824] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] dd43adb3-b073-483a-81dd-69df7f746874/dd43adb3-b073-483a-81dd-69df7f746874.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1017.994688] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-86d8da44-89a7-43cc-b7d9-165839ab8973 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.017239] env[69328]: DEBUG oslo_vmware.api [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Waiting for the task: (returnval){ [ 1018.017239] env[69328]: value = "task-3273815" [ 1018.017239] env[69328]: _type = "Task" [ 1018.017239] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.021095] env[69328]: DEBUG nova.network.neutron [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Successfully updated port: 5fae6e5f-3223-4872-83ba-b127e3c15d40 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1018.027871] env[69328]: DEBUG oslo_vmware.api [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273815, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.115246] env[69328]: DEBUG oslo_concurrency.lockutils [None req-dda2c9ea-ab23-4526-ba73-3282180787b9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.522s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1018.121322] env[69328]: DEBUG oslo_concurrency.lockutils [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.860s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1018.121606] env[69328]: DEBUG nova.objects.instance [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lazy-loading 'resources' on Instance uuid 76210566-12d7-4f6a-afa1-6329e87e0f85 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1018.184902] env[69328]: DEBUG nova.network.neutron [req-797c7690-17d4-4542-a446-775e291cf6fb req-e71ddad3-d4b5-4de3-ab39-998e5b757a54 service nova] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Updated VIF entry in instance network info cache for port 51df3c9d-fc9a-47c4-83a8-917ec6fedbbf. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1018.185338] env[69328]: DEBUG nova.network.neutron [req-797c7690-17d4-4542-a446-775e291cf6fb req-e71ddad3-d4b5-4de3-ab39-998e5b757a54 service nova] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Updating instance_info_cache with network_info: [{"id": "51df3c9d-fc9a-47c4-83a8-917ec6fedbbf", "address": "fa:16:3e:d9:09:00", "network": {"id": "bd599844-1a77-4107-a344-7b4ffd53ea20", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1763799815-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f5cd4dfb0b54081aba7bf8620521193", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51df3c9d-fc", "ovs_interfaceid": "51df3c9d-fc9a-47c4-83a8-917ec6fedbbf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1018.322269] env[69328]: DEBUG oslo_vmware.api [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]524e318a-16b8-20f8-2d6a-9c8d15382989, 
'name': SearchDatastore_Task, 'duration_secs': 0.009619} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.322592] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1018.322868] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1018.323148] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1018.323304] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1018.323483] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1018.323753] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e5722b24-a237-417a-bbd2-edc5d00e6c21 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.334537] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1018.334748] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1018.335538] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b14ddf5a-8a91-4e0b-904c-4c60e8cee44e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.342744] env[69328]: DEBUG oslo_vmware.api [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Waiting for the task: (returnval){ [ 1018.342744] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b1aa1c-b1d8-13fa-b537-bb8952f8828e" [ 1018.342744] env[69328]: _type = "Task" [ 1018.342744] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.351297] env[69328]: DEBUG oslo_vmware.api [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b1aa1c-b1d8-13fa-b537-bb8952f8828e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.441811] env[69328]: DEBUG oslo_vmware.api [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273814, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.529909] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Acquiring lock "refresh_cache-19f537b7-90fc-4832-b137-e042e00a508b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1018.530244] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Acquired lock "refresh_cache-19f537b7-90fc-4832-b137-e042e00a508b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1018.530525] env[69328]: DEBUG nova.network.neutron [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1018.531840] env[69328]: DEBUG oslo_vmware.api [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273815, 'name': ReconfigVM_Task, 'duration_secs': 0.38402} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.532449] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Reconfigured VM instance instance-0000005d to attach disk [datastore1] dd43adb3-b073-483a-81dd-69df7f746874/dd43adb3-b073-483a-81dd-69df7f746874.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1018.533225] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d0982593-c88a-4190-8c16-dadb3a2c2566 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.540749] env[69328]: DEBUG oslo_vmware.api [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Waiting for the task: (returnval){ [ 1018.540749] env[69328]: value = "task-3273816" [ 1018.540749] env[69328]: _type = "Task" [ 1018.540749] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.552055] env[69328]: DEBUG oslo_vmware.api [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273816, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.626953] env[69328]: DEBUG nova.objects.instance [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lazy-loading 'numa_topology' on Instance uuid 76210566-12d7-4f6a-afa1-6329e87e0f85 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1018.682346] env[69328]: INFO nova.scheduler.client.report [None req-dda2c9ea-ab23-4526-ba73-3282180787b9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Deleted allocation for migration d9a4d9ef-a86e-49ef-841e-1e4093b2e6d7 [ 1018.688888] env[69328]: DEBUG oslo_concurrency.lockutils [req-797c7690-17d4-4542-a446-775e291cf6fb req-e71ddad3-d4b5-4de3-ab39-998e5b757a54 service nova] Releasing lock "refresh_cache-6b9757de-a274-4f4d-9b73-cc2ca92b4732" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1018.853369] env[69328]: DEBUG oslo_vmware.api [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b1aa1c-b1d8-13fa-b537-bb8952f8828e, 'name': SearchDatastore_Task, 'duration_secs': 0.010852} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.854161] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c650b5dd-3216-4007-b24e-15dd62a6d1aa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.859764] env[69328]: DEBUG oslo_vmware.api [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Waiting for the task: (returnval){ [ 1018.859764] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ecfdfc-b10a-a1fb-b707-dc73002abc5a" [ 1018.859764] env[69328]: _type = "Task" [ 1018.859764] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.867826] env[69328]: DEBUG oslo_vmware.api [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ecfdfc-b10a-a1fb-b707-dc73002abc5a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.941136] env[69328]: DEBUG oslo_vmware.api [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273814, 'name': PowerOnVM_Task, 'duration_secs': 0.536485} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.941485] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1018.941597] env[69328]: INFO nova.compute.manager [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Took 8.86 seconds to spawn the instance on the hypervisor. [ 1018.941825] env[69328]: DEBUG nova.compute.manager [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1018.942601] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9bcf1ee-27bb-43fd-a1e6-4e83085edeab {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.053069] env[69328]: DEBUG oslo_vmware.api [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273816, 'name': Rename_Task} progress is 99%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.065148] env[69328]: DEBUG nova.network.neutron [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1019.133222] env[69328]: DEBUG nova.objects.base [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Object Instance<76210566-12d7-4f6a-afa1-6329e87e0f85> lazy-loaded attributes: resources,numa_topology {{(pid=69328) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1019.190617] env[69328]: DEBUG oslo_concurrency.lockutils [None req-dda2c9ea-ab23-4526-ba73-3282180787b9 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "96f604a9-e42c-4aa8-b5b5-edcb34901d94" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 20.970s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1019.245507] env[69328]: DEBUG nova.network.neutron [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Updating instance_info_cache with network_info: [{"id": "5fae6e5f-3223-4872-83ba-b127e3c15d40", "address": "fa:16:3e:f1:91:d2", "network": {"id": "bd599844-1a77-4107-a344-7b4ffd53ea20", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1763799815-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f5cd4dfb0b54081aba7bf8620521193", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5fae6e5f-32", "ovs_interfaceid": "5fae6e5f-3223-4872-83ba-b127e3c15d40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1019.374018] env[69328]: DEBUG oslo_vmware.api [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ecfdfc-b10a-a1fb-b707-dc73002abc5a, 'name': SearchDatastore_Task, 'duration_secs': 0.038202} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.374018] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1019.374360] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 6b9757de-a274-4f4d-9b73-cc2ca92b4732/6b9757de-a274-4f4d-9b73-cc2ca92b4732.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1019.374924] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-91def65e-81ce-4f63-ad5e-38585328428f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.383292] env[69328]: DEBUG oslo_vmware.api [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Waiting for the task: (returnval){ [ 1019.383292] env[69328]: value = "task-3273817" [ 1019.383292] env[69328]: _type = "Task" [ 1019.383292] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.395592] env[69328]: DEBUG oslo_vmware.api [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273817, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.465551] env[69328]: INFO nova.compute.manager [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Took 21.61 seconds to build instance. 
[ 1019.500682] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-552949f1-8c4e-44d0-82ea-e088aaa6bb25 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.511924] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df54d2e9-61a7-4a97-bf8a-391587c2507b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.547532] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7e509ae-26d7-49c3-a32e-3af4e9dc6798 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.556663] env[69328]: DEBUG oslo_vmware.api [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273816, 'name': Rename_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.560358] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d2bbe48-6bfa-483b-80cb-090f909d757e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.579940] env[69328]: DEBUG nova.compute.provider_tree [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1019.652330] env[69328]: DEBUG nova.compute.manager [req-35aca99a-f963-48a5-8607-d1155091a1e4 req-4829a7d5-67e2-4d47-99e7-92dfa197c7ea service nova] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Received event network-vif-plugged-5fae6e5f-3223-4872-83ba-b127e3c15d40 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1019.652330] env[69328]: DEBUG oslo_concurrency.lockutils [req-35aca99a-f963-48a5-8607-d1155091a1e4 req-4829a7d5-67e2-4d47-99e7-92dfa197c7ea service nova] Acquiring lock "19f537b7-90fc-4832-b137-e042e00a508b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1019.652330] env[69328]: DEBUG oslo_concurrency.lockutils [req-35aca99a-f963-48a5-8607-d1155091a1e4 req-4829a7d5-67e2-4d47-99e7-92dfa197c7ea service nova] Lock "19f537b7-90fc-4832-b137-e042e00a508b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1019.652330] env[69328]: DEBUG oslo_concurrency.lockutils [req-35aca99a-f963-48a5-8607-d1155091a1e4 req-4829a7d5-67e2-4d47-99e7-92dfa197c7ea service nova] Lock "19f537b7-90fc-4832-b137-e042e00a508b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1019.652330] env[69328]: DEBUG nova.compute.manager [req-35aca99a-f963-48a5-8607-d1155091a1e4 
req-4829a7d5-67e2-4d47-99e7-92dfa197c7ea service nova] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] No waiting events found dispatching network-vif-plugged-5fae6e5f-3223-4872-83ba-b127e3c15d40 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1019.652330] env[69328]: WARNING nova.compute.manager [req-35aca99a-f963-48a5-8607-d1155091a1e4 req-4829a7d5-67e2-4d47-99e7-92dfa197c7ea service nova] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Received unexpected event network-vif-plugged-5fae6e5f-3223-4872-83ba-b127e3c15d40 for instance with vm_state building and task_state spawning. [ 1019.655863] env[69328]: DEBUG nova.compute.manager [req-35aca99a-f963-48a5-8607-d1155091a1e4 req-4829a7d5-67e2-4d47-99e7-92dfa197c7ea service nova] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Received event network-changed-5fae6e5f-3223-4872-83ba-b127e3c15d40 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1019.655977] env[69328]: DEBUG nova.compute.manager [req-35aca99a-f963-48a5-8607-d1155091a1e4 req-4829a7d5-67e2-4d47-99e7-92dfa197c7ea service nova] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Refreshing instance network info cache due to event network-changed-5fae6e5f-3223-4872-83ba-b127e3c15d40. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1019.656175] env[69328]: DEBUG oslo_concurrency.lockutils [req-35aca99a-f963-48a5-8607-d1155091a1e4 req-4829a7d5-67e2-4d47-99e7-92dfa197c7ea service nova] Acquiring lock "refresh_cache-19f537b7-90fc-4832-b137-e042e00a508b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1019.723895] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0f547f2f-a43b-462c-881b-bbc0919972d2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "96f604a9-e42c-4aa8-b5b5-edcb34901d94" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1019.724361] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0f547f2f-a43b-462c-881b-bbc0919972d2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "96f604a9-e42c-4aa8-b5b5-edcb34901d94" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1019.724973] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0f547f2f-a43b-462c-881b-bbc0919972d2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "96f604a9-e42c-4aa8-b5b5-edcb34901d94-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1019.724973] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0f547f2f-a43b-462c-881b-bbc0919972d2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "96f604a9-e42c-4aa8-b5b5-edcb34901d94-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 
1019.725229] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0f547f2f-a43b-462c-881b-bbc0919972d2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "96f604a9-e42c-4aa8-b5b5-edcb34901d94-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1019.727869] env[69328]: INFO nova.compute.manager [None req-0f547f2f-a43b-462c-881b-bbc0919972d2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Terminating instance [ 1019.748476] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Releasing lock "refresh_cache-19f537b7-90fc-4832-b137-e042e00a508b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1019.748922] env[69328]: DEBUG nova.compute.manager [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Instance network_info: |[{"id": "5fae6e5f-3223-4872-83ba-b127e3c15d40", "address": "fa:16:3e:f1:91:d2", "network": {"id": "bd599844-1a77-4107-a344-7b4ffd53ea20", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1763799815-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f5cd4dfb0b54081aba7bf8620521193", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5fae6e5f-32", "ovs_interfaceid": "5fae6e5f-3223-4872-83ba-b127e3c15d40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1019.749412] env[69328]: DEBUG oslo_concurrency.lockutils [req-35aca99a-f963-48a5-8607-d1155091a1e4 req-4829a7d5-67e2-4d47-99e7-92dfa197c7ea service nova] Acquired lock "refresh_cache-19f537b7-90fc-4832-b137-e042e00a508b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1019.749692] env[69328]: DEBUG nova.network.neutron [req-35aca99a-f963-48a5-8607-d1155091a1e4 req-4829a7d5-67e2-4d47-99e7-92dfa197c7ea service nova] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Refreshing network info cache for port 5fae6e5f-3223-4872-83ba-b127e3c15d40 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1019.751180] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 
tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f1:91:d2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7b83383f-ed7a-4efd-aef7-aa8c15649d07', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5fae6e5f-3223-4872-83ba-b127e3c15d40', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1019.765912] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1019.770217] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1019.771034] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cb4248c4-4566-441e-b4a8-5cd08efa3f69 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.795216] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1019.795216] env[69328]: value = "task-3273818" [ 1019.795216] env[69328]: _type = "Task" [ 1019.795216] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.805128] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273818, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.896340] env[69328]: DEBUG oslo_vmware.api [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273817, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.968189] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e3fc43a7-f271-43ab-9f04-730611ed1102 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "dc050589-e37a-4798-9532-df4ecfab7eb1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.124s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1020.055294] env[69328]: DEBUG oslo_vmware.api [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273816, 'name': Rename_Task, 'duration_secs': 1.231644} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.055515] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1020.055767] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-38e0b2bd-33a5-4592-853b-7d659a294685 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.063308] env[69328]: DEBUG oslo_vmware.api [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Waiting for the task: (returnval){ [ 1020.063308] env[69328]: value = "task-3273819" [ 1020.063308] env[69328]: _type = "Task" [ 1020.063308] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.071339] env[69328]: DEBUG oslo_vmware.api [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273819, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.082042] env[69328]: DEBUG nova.scheduler.client.report [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1020.114606] env[69328]: DEBUG nova.network.neutron [req-35aca99a-f963-48a5-8607-d1155091a1e4 req-4829a7d5-67e2-4d47-99e7-92dfa197c7ea service nova] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Updated VIF entry in instance network info cache for port 5fae6e5f-3223-4872-83ba-b127e3c15d40. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1020.114606] env[69328]: DEBUG nova.network.neutron [req-35aca99a-f963-48a5-8607-d1155091a1e4 req-4829a7d5-67e2-4d47-99e7-92dfa197c7ea service nova] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Updating instance_info_cache with network_info: [{"id": "5fae6e5f-3223-4872-83ba-b127e3c15d40", "address": "fa:16:3e:f1:91:d2", "network": {"id": "bd599844-1a77-4107-a344-7b4ffd53ea20", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1763799815-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f5cd4dfb0b54081aba7bf8620521193", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5fae6e5f-32", "ovs_interfaceid": "5fae6e5f-3223-4872-83ba-b127e3c15d40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1020.208072] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "14521ee3-d749-48b4-aeec-23c94ca2cf9f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1020.208072] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "14521ee3-d749-48b4-aeec-23c94ca2cf9f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1020.234666] env[69328]: DEBUG nova.compute.manager [None req-0f547f2f-a43b-462c-881b-bbc0919972d2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1020.234666] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0f547f2f-a43b-462c-881b-bbc0919972d2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1020.235854] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a029aaed-0fe3-4053-a7e0-516cd3839e1f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.245236] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f547f2f-a43b-462c-881b-bbc0919972d2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1020.245515] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d742ea85-9ffe-4b45-a241-47d3818f26bd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.252826] env[69328]: DEBUG oslo_vmware.api [None req-0f547f2f-a43b-462c-881b-bbc0919972d2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 1020.252826] env[69328]: value = "task-3273820" [ 1020.252826] env[69328]: _type = "Task" [ 1020.252826] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.262424] env[69328]: DEBUG oslo_vmware.api [None req-0f547f2f-a43b-462c-881b-bbc0919972d2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273820, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.306714] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273818, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.396080] env[69328]: DEBUG oslo_vmware.api [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273817, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.572764} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.396266] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 6b9757de-a274-4f4d-9b73-cc2ca92b4732/6b9757de-a274-4f4d-9b73-cc2ca92b4732.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1020.396477] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1020.396730] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bfa435b2-d5c4-434b-97de-26377462d0d0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.405450] env[69328]: DEBUG oslo_vmware.api [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Waiting for the task: (returnval){ [ 1020.405450] env[69328]: value = "task-3273821" [ 1020.405450] env[69328]: _type = "Task" [ 1020.405450] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.414192] env[69328]: DEBUG oslo_vmware.api [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273821, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.577818] env[69328]: DEBUG oslo_vmware.api [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273819, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.587835] env[69328]: DEBUG oslo_concurrency.lockutils [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.467s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1020.590589] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f7fd4c57-f252-4c6e-a15f-9c4426398d79 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.516s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1020.590872] env[69328]: DEBUG nova.objects.instance [None req-f7fd4c57-f252-4c6e-a15f-9c4426398d79 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lazy-loading 'resources' on Instance uuid bc9c3a41-7264-4d69-bc15-397b5fa0a8ad {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1020.616946] env[69328]: DEBUG oslo_concurrency.lockutils [req-35aca99a-f963-48a5-8607-d1155091a1e4 req-4829a7d5-67e2-4d47-99e7-92dfa197c7ea service nova] Releasing lock "refresh_cache-19f537b7-90fc-4832-b137-e042e00a508b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1020.710698] env[69328]: DEBUG nova.compute.manager [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1020.765012] env[69328]: DEBUG oslo_vmware.api [None req-0f547f2f-a43b-462c-881b-bbc0919972d2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273820, 'name': PowerOffVM_Task, 'duration_secs': 0.3367} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.765316] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f547f2f-a43b-462c-881b-bbc0919972d2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1020.765486] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0f547f2f-a43b-462c-881b-bbc0919972d2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1020.765748] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d6f4f257-63ef-4149-b454-859c5b1edc98 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.805937] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273818, 'name': CreateVM_Task, 'duration_secs': 0.519641} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.806027] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1020.806734] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1020.806919] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1020.807278] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1020.807542] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09b18199-f201-4f21-bbc1-11ddab3ef0d7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.813317] env[69328]: DEBUG oslo_vmware.api [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Waiting for the task: (returnval){ [ 1020.813317] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5279c206-557f-339c-4ee1-9dc547f773c7" [ 1020.813317] env[69328]: _type = "Task" [ 1020.813317] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.821992] env[69328]: DEBUG oslo_vmware.api [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5279c206-557f-339c-4ee1-9dc547f773c7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.839412] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0f547f2f-a43b-462c-881b-bbc0919972d2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1020.839721] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0f547f2f-a43b-462c-881b-bbc0919972d2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1020.839940] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f547f2f-a43b-462c-881b-bbc0919972d2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Deleting the datastore file [datastore2] 96f604a9-e42c-4aa8-b5b5-edcb34901d94 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1020.840243] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d1befaf2-7718-45ef-a0f6-0dbb1612b600 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.848392] env[69328]: DEBUG oslo_vmware.api [None req-0f547f2f-a43b-462c-881b-bbc0919972d2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 1020.848392] env[69328]: value = "task-3273823" [ 1020.848392] env[69328]: _type = "Task" [ 1020.848392] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.857188] env[69328]: DEBUG oslo_vmware.api [None req-0f547f2f-a43b-462c-881b-bbc0919972d2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273823, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.916395] env[69328]: DEBUG oslo_vmware.api [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273821, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074542} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.916395] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1020.917211] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31770564-6f10-4ff0-8a7f-daf640182e16 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.940717] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Reconfiguring VM instance instance-0000005e to attach disk [datastore2] 6b9757de-a274-4f4d-9b73-cc2ca92b4732/6b9757de-a274-4f4d-9b73-cc2ca92b4732.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1020.941052] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1cfe062b-17e5-4538-b14e-710e52a79fe3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.961181] env[69328]: DEBUG oslo_vmware.api [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Waiting for the task: (returnval){ [ 1020.961181] env[69328]: value = "task-3273824" [ 1020.961181] env[69328]: _type = "Task" [ 1020.961181] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.970827] env[69328]: DEBUG oslo_vmware.api [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273824, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.078026] env[69328]: DEBUG oslo_vmware.api [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273819, 'name': PowerOnVM_Task, 'duration_secs': 0.600304} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.078026] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1021.078026] env[69328]: INFO nova.compute.manager [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Took 8.47 seconds to spawn the instance on the hypervisor. 
[ 1021.078026] env[69328]: DEBUG nova.compute.manager [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1021.078026] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b78be6d-30db-4e99-841e-6ed9cf68a61e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.101940] env[69328]: DEBUG oslo_concurrency.lockutils [None req-db684454-de08-4936-b6d5-e99244079d15 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lock "76210566-12d7-4f6a-afa1-6329e87e0f85" "released" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: held 41.570s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1021.101940] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lock "76210566-12d7-4f6a-afa1-6329e87e0f85" acquired by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" :: waited 16.485s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1021.101940] env[69328]: INFO nova.compute.manager [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Unshelving [ 1021.232567] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1021.331709] env[69328]: DEBUG oslo_vmware.api [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5279c206-557f-339c-4ee1-9dc547f773c7, 'name': SearchDatastore_Task, 'duration_secs': 0.031055} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.332236] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1021.332599] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1021.332971] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1021.333250] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1021.333567] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1021.333968] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a8c9e8f1-9b5d-4a30-a6b5-231cea2e6b92 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.349410] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1021.349610] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1021.353374] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91634824-f517-40a3-baf5-67d8eab003ae {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.362418] env[69328]: DEBUG oslo_vmware.api [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Waiting for the task: (returnval){ [ 1021.362418] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]524cc9c6-adc8-c88f-6d97-861e0e177b75" [ 1021.362418] env[69328]: _type = "Task" [ 1021.362418] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.365641] env[69328]: DEBUG oslo_vmware.api [None req-0f547f2f-a43b-462c-881b-bbc0919972d2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273823, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.256756} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.368985] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f547f2f-a43b-462c-881b-bbc0919972d2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1021.369258] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0f547f2f-a43b-462c-881b-bbc0919972d2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1021.369412] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0f547f2f-a43b-462c-881b-bbc0919972d2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1021.369589] env[69328]: INFO nova.compute.manager [None req-0f547f2f-a43b-462c-881b-bbc0919972d2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1021.369904] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0f547f2f-a43b-462c-881b-bbc0919972d2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1021.372955] env[69328]: DEBUG nova.compute.manager [-] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1021.373096] env[69328]: DEBUG nova.network.neutron [-] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1021.382458] env[69328]: DEBUG oslo_vmware.api [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]524cc9c6-adc8-c88f-6d97-861e0e177b75, 'name': SearchDatastore_Task, 'duration_secs': 0.015733} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.383415] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96378441-d4cb-48f8-b5c1-1fa3f0d6f4f2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.391182] env[69328]: DEBUG oslo_vmware.api [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Waiting for the task: (returnval){ [ 1021.391182] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5283de1c-2055-7356-73a8-11c8fc736f3e" [ 1021.391182] env[69328]: _type = "Task" [ 1021.391182] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.401114] env[69328]: DEBUG oslo_vmware.api [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5283de1c-2055-7356-73a8-11c8fc736f3e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.472205] env[69328]: DEBUG oslo_vmware.api [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273824, 'name': ReconfigVM_Task, 'duration_secs': 0.46582} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.474683] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Reconfigured VM instance instance-0000005e to attach disk [datastore2] 6b9757de-a274-4f4d-9b73-cc2ca92b4732/6b9757de-a274-4f4d-9b73-cc2ca92b4732.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1021.475546] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-01c3f707-6aa0-4d47-bd41-1dad3021d98f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.483567] env[69328]: DEBUG oslo_vmware.api [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Waiting for the task: (returnval){ [ 1021.483567] env[69328]: value = "task-3273825" [ 1021.483567] env[69328]: _type = "Task" [ 1021.483567] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.497290] env[69328]: DEBUG oslo_vmware.api [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273825, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.538357] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bd25292-1488-4304-bd32-4fc7cde1f44a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.546996] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f367b881-9d84-445b-8d01-bbbd6efb1a6c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.581440] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c50233d-2ada-496d-89c7-b6ef37c11304 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.598342] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fc6ea9b-cbfa-4eab-b561-16d6e33354ff {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.610235] env[69328]: INFO nova.compute.manager [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Took 22.90 seconds to build instance. 
[ 1021.624149] env[69328]: DEBUG nova.compute.provider_tree [None req-f7fd4c57-f252-4c6e-a15f-9c4426398d79 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1021.645362] env[69328]: DEBUG nova.compute.manager [req-920fccc0-1a15-4097-90ed-8116d9bb0771 req-4f5466aa-3085-432e-8393-be67c9242cb7 service nova] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Received event network-vif-deleted-d97e62a9-59f8-4f3b-9296-f5a0803d2b10 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1021.645646] env[69328]: INFO nova.compute.manager [req-920fccc0-1a15-4097-90ed-8116d9bb0771 req-4f5466aa-3085-432e-8393-be67c9242cb7 service nova] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Neutron deleted interface d97e62a9-59f8-4f3b-9296-f5a0803d2b10; detaching it from the instance and deleting it from the info cache [ 1021.645859] env[69328]: DEBUG nova.network.neutron [req-920fccc0-1a15-4097-90ed-8116d9bb0771 req-4f5466aa-3085-432e-8393-be67c9242cb7 service nova] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1021.688170] env[69328]: DEBUG nova.compute.manager [req-f9f08a82-3f7d-4973-8c30-3f14ff962ff1 req-6a6438bc-aef5-4ca8-b0f4-8da38d8fe002 service nova] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Received event network-changed-95776220-5fd9-42a1-8bf9-cfb9fe49d62d {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1021.688170] env[69328]: DEBUG nova.compute.manager [req-f9f08a82-3f7d-4973-8c30-3f14ff962ff1 req-6a6438bc-aef5-4ca8-b0f4-8da38d8fe002 service nova] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Refreshing instance network info cache due to event network-changed-95776220-5fd9-42a1-8bf9-cfb9fe49d62d. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1021.688366] env[69328]: DEBUG oslo_concurrency.lockutils [req-f9f08a82-3f7d-4973-8c30-3f14ff962ff1 req-6a6438bc-aef5-4ca8-b0f4-8da38d8fe002 service nova] Acquiring lock "refresh_cache-dc050589-e37a-4798-9532-df4ecfab7eb1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1021.688519] env[69328]: DEBUG oslo_concurrency.lockutils [req-f9f08a82-3f7d-4973-8c30-3f14ff962ff1 req-6a6438bc-aef5-4ca8-b0f4-8da38d8fe002 service nova] Acquired lock "refresh_cache-dc050589-e37a-4798-9532-df4ecfab7eb1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1021.688650] env[69328]: DEBUG nova.network.neutron [req-f9f08a82-3f7d-4973-8c30-3f14ff962ff1 req-6a6438bc-aef5-4ca8-b0f4-8da38d8fe002 service nova] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Refreshing network info cache for port 95776220-5fd9-42a1-8bf9-cfb9fe49d62d {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1021.903505] env[69328]: DEBUG oslo_vmware.api [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5283de1c-2055-7356-73a8-11c8fc736f3e, 'name': SearchDatastore_Task, 'duration_secs': 0.011764} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.903933] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1021.904057] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 19f537b7-90fc-4832-b137-e042e00a508b/19f537b7-90fc-4832-b137-e042e00a508b.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1021.904357] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-96ae36c1-1f4c-456c-9bb8-f097ddc78142 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.912456] env[69328]: DEBUG oslo_vmware.api [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Waiting for the task: (returnval){ [ 1021.912456] env[69328]: value = "task-3273826" [ 1021.912456] env[69328]: _type = "Task" [ 1021.912456] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.920893] env[69328]: DEBUG oslo_vmware.api [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273826, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.994039] env[69328]: DEBUG oslo_vmware.api [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273825, 'name': Rename_Task, 'duration_secs': 0.163797} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.994359] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1021.994612] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-93c25aa2-c42a-4c22-97f4-5e72c1504661 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.001489] env[69328]: DEBUG oslo_vmware.api [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Waiting for the task: (returnval){ [ 1022.001489] env[69328]: value = "task-3273827" [ 1022.001489] env[69328]: _type = "Task" [ 1022.001489] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.009767] env[69328]: DEBUG oslo_vmware.api [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273827, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.113135] env[69328]: DEBUG oslo_concurrency.lockutils [None req-698c4db3-a432-4c48-b023-ca095fce11b0 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Lock "dd43adb3-b073-483a-81dd-69df7f746874" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 24.412s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1022.113135] env[69328]: DEBUG nova.network.neutron [-] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1022.128605] env[69328]: DEBUG nova.compute.utils [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1022.130763] env[69328]: DEBUG nova.scheduler.client.report [None req-f7fd4c57-f252-4c6e-a15f-9c4426398d79 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1022.148461] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-39cc7070-3b65-4318-9adf-5653d23a84b4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.159828] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64e3690f-773b-4c5d-bee2-2f6a54b66df7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.204912] env[69328]: DEBUG nova.compute.manager [req-920fccc0-1a15-4097-90ed-8116d9bb0771 req-4f5466aa-3085-432e-8393-be67c9242cb7 service nova] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Detach interface failed, port_id=d97e62a9-59f8-4f3b-9296-f5a0803d2b10, reason: Instance 96f604a9-e42c-4aa8-b5b5-edcb34901d94 could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1022.423696] env[69328]: DEBUG oslo_vmware.api [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273826, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.466499] env[69328]: DEBUG nova.network.neutron [req-f9f08a82-3f7d-4973-8c30-3f14ff962ff1 req-6a6438bc-aef5-4ca8-b0f4-8da38d8fe002 service nova] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Updated VIF entry in instance network info cache for port 95776220-5fd9-42a1-8bf9-cfb9fe49d62d. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1022.466952] env[69328]: DEBUG nova.network.neutron [req-f9f08a82-3f7d-4973-8c30-3f14ff962ff1 req-6a6438bc-aef5-4ca8-b0f4-8da38d8fe002 service nova] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Updating instance_info_cache with network_info: [{"id": "95776220-5fd9-42a1-8bf9-cfb9fe49d62d", "address": "fa:16:3e:0c:f4:26", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.130", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95776220-5f", "ovs_interfaceid": "95776220-5fd9-42a1-8bf9-cfb9fe49d62d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1022.511174] env[69328]: DEBUG oslo_vmware.api [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273827, 'name': PowerOnVM_Task} progress is 71%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.615473] env[69328]: INFO nova.compute.manager [-] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Took 1.24 seconds to deallocate network for instance. 
[ 1022.636069] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f7fd4c57-f252-4c6e-a15f-9c4426398d79 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.045s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1022.639205] env[69328]: INFO nova.virt.block_device [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Booting with volume 68ea45a2-2443-494f-afc8-d4648ea33fa0 at /dev/sdb [ 1022.641699] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.617s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1022.642826] env[69328]: INFO nova.compute.claims [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1022.663863] env[69328]: INFO nova.scheduler.client.report [None req-f7fd4c57-f252-4c6e-a15f-9c4426398d79 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Deleted allocations for instance bc9c3a41-7264-4d69-bc15-397b5fa0a8ad [ 1022.681477] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7d0342bd-c70a-4c2f-858a-6872a0af3dcc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.692551] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86648daf-2ea8-4e8f-868e-6a8b4b140b51 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.732283] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6504a57c-c987-423b-ad20-744f4ec16cd4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.742672] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68721dfc-db80-4d9b-b79b-abd3990ea67d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.783267] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-710b5453-9b4d-42b8-8cc5-74b0c7ec140f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.793216] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54ce8667-2e78-4e02-aea5-2f7bc1caf3b8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.810960] env[69328]: DEBUG nova.virt.block_device [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 
tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Updating existing volume attachment record: c4b7862d-df68-49ad-8a7f-7a4fb0f31ffd {{(pid=69328) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1022.923556] env[69328]: DEBUG oslo_vmware.api [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273826, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.585264} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.923864] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 19f537b7-90fc-4832-b137-e042e00a508b/19f537b7-90fc-4832-b137-e042e00a508b.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1022.923957] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1022.924292] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-46909c13-e3ba-4c3d-9bf9-7db665b31dcc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.932526] env[69328]: DEBUG oslo_vmware.api [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Waiting for the task: (returnval){ [ 1022.932526] env[69328]: value = "task-3273828" [ 1022.932526] env[69328]: _type = "Task" [ 1022.932526] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.945492] env[69328]: DEBUG oslo_vmware.api [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273828, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.971114] env[69328]: DEBUG oslo_concurrency.lockutils [req-f9f08a82-3f7d-4973-8c30-3f14ff962ff1 req-6a6438bc-aef5-4ca8-b0f4-8da38d8fe002 service nova] Releasing lock "refresh_cache-dc050589-e37a-4798-9532-df4ecfab7eb1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1023.013918] env[69328]: DEBUG oslo_vmware.api [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273827, 'name': PowerOnVM_Task, 'duration_secs': 0.826192} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.014422] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1023.014760] env[69328]: INFO nova.compute.manager [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Took 7.89 seconds to spawn the instance on the hypervisor. [ 1023.015074] env[69328]: DEBUG nova.compute.manager [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1023.016278] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53dc7c9a-8c41-421f-972a-17e9a8ab4eb3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.123384] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0f547f2f-a43b-462c-881b-bbc0919972d2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1023.172888] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f7fd4c57-f252-4c6e-a15f-9c4426398d79 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "bc9c3a41-7264-4d69-bc15-397b5fa0a8ad" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 24.210s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1023.444472] env[69328]: DEBUG oslo_vmware.api [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273828, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072474} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.444736] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1023.445584] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f796c2d-b239-44b2-96c0-88b5adc62210 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.469450] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] 19f537b7-90fc-4832-b137-e042e00a508b/19f537b7-90fc-4832-b137-e042e00a508b.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1023.469791] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5a2f20fd-ef48-4a80-9901-b5e5119838f9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.490812] env[69328]: DEBUG oslo_vmware.api [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Waiting for the task: (returnval){ [ 1023.490812] env[69328]: value = "task-3273832" [ 1023.490812] env[69328]: _type = "Task" [ 1023.490812] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.499146] env[69328]: DEBUG oslo_vmware.api [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273832, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.536175] env[69328]: INFO nova.compute.manager [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Took 24.60 seconds to build instance. 
[ 1023.780439] env[69328]: DEBUG oslo_concurrency.lockutils [None req-99c1aae4-9379-4554-8864-91527361c589 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "3ba646e8-a5c8-4917-a1c4-32b37affb598" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1023.781185] env[69328]: DEBUG oslo_concurrency.lockutils [None req-99c1aae4-9379-4554-8864-91527361c589 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "3ba646e8-a5c8-4917-a1c4-32b37affb598" acquired by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1024.004307] env[69328]: DEBUG oslo_vmware.api [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273832, 'name': ReconfigVM_Task, 'duration_secs': 0.365732} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.006509] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Reconfigured VM instance instance-0000005f to attach disk [datastore2] 19f537b7-90fc-4832-b137-e042e00a508b/19f537b7-90fc-4832-b137-e042e00a508b.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1024.007567] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-410d183f-343c-4139-a0b0-937e2c690308 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.015267] env[69328]: DEBUG oslo_vmware.api [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Waiting for the task: (returnval){ [ 1024.015267] env[69328]: value = "task-3273833" [ 1024.015267] env[69328]: _type = "Task" [ 1024.015267] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.026718] env[69328]: DEBUG oslo_vmware.api [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273833, 'name': Rename_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.038720] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b17b013b-f6e5-4c1f-bacc-447044c3e22d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Lock "6b9757de-a274-4f4d-9b73-cc2ca92b4732" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.109s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1024.063830] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31a5fdff-a38b-4cf9-b8df-51307843e577 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.072449] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1205fd12-8e8d-4b4f-93cc-07f619a61963 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.104162] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8018339-755e-427b-9468-be681376eb73 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.113276] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e932798b-b6b4-4380-a035-d20b2ce720bf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.130119] env[69328]: DEBUG nova.compute.provider_tree [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1024.286813] env[69328]: INFO nova.compute.manager [None req-99c1aae4-9379-4554-8864-91527361c589 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Detaching volume a39fd325-7c9d-4482-b7a4-43b28bf52e5c [ 1024.334283] env[69328]: INFO nova.virt.block_device [None req-99c1aae4-9379-4554-8864-91527361c589 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Attempting to driver detach volume a39fd325-7c9d-4482-b7a4-43b28bf52e5c from mountpoint /dev/sdb [ 1024.334283] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-99c1aae4-9379-4554-8864-91527361c589 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Volume detach. 
Driver type: vmdk {{(pid=69328) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1024.334283] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-99c1aae4-9379-4554-8864-91527361c589 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653876', 'volume_id': 'a39fd325-7c9d-4482-b7a4-43b28bf52e5c', 'name': 'volume-a39fd325-7c9d-4482-b7a4-43b28bf52e5c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3ba646e8-a5c8-4917-a1c4-32b37affb598', 'attached_at': '', 'detached_at': '', 'volume_id': 'a39fd325-7c9d-4482-b7a4-43b28bf52e5c', 'serial': 'a39fd325-7c9d-4482-b7a4-43b28bf52e5c'} {{(pid=69328) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1024.334283] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60c5481c-8ac3-429b-ac4f-1b19761f467a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.361695] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05825676-62ac-4554-8d0d-290104305905 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.370308] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8ed5793-9d7b-4119-9855-beed4f93e184 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.391580] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8292018d-c40e-4362-b650-f38f12b7ed9e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.407610] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-99c1aae4-9379-4554-8864-91527361c589 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] The volume has not been displaced from its original location: [datastore2] volume-a39fd325-7c9d-4482-b7a4-43b28bf52e5c/volume-a39fd325-7c9d-4482-b7a4-43b28bf52e5c.vmdk. No consolidation needed. 
{{(pid=69328) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1024.412962] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-99c1aae4-9379-4554-8864-91527361c589 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Reconfiguring VM instance instance-00000046 to detach disk 2001 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1024.413355] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-948b3bf8-0716-401e-a2af-ebddd1141166 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.433495] env[69328]: DEBUG oslo_vmware.api [None req-99c1aae4-9379-4554-8864-91527361c589 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 1024.433495] env[69328]: value = "task-3273834" [ 1024.433495] env[69328]: _type = "Task" [ 1024.433495] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.442606] env[69328]: DEBUG oslo_vmware.api [None req-99c1aae4-9379-4554-8864-91527361c589 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273834, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.525138] env[69328]: DEBUG oslo_vmware.api [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273833, 'name': Rename_Task, 'duration_secs': 0.226386} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.525477] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1024.525758] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f6d51432-3176-4d7e-af0b-b005de924399 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.532768] env[69328]: DEBUG oslo_vmware.api [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Waiting for the task: (returnval){ [ 1024.532768] env[69328]: value = "task-3273835" [ 1024.532768] env[69328]: _type = "Task" [ 1024.532768] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.541053] env[69328]: DEBUG oslo_vmware.api [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273835, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.633441] env[69328]: DEBUG nova.scheduler.client.report [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1024.946643] env[69328]: DEBUG oslo_vmware.api [None req-99c1aae4-9379-4554-8864-91527361c589 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273834, 'name': ReconfigVM_Task, 'duration_secs': 0.496737} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.946949] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-99c1aae4-9379-4554-8864-91527361c589 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Reconfigured VM instance instance-00000046 to detach disk 2001 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1024.951945] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0dd2adcb-b2e5-4624-a99b-52c6053efa65 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.968808] env[69328]: DEBUG oslo_vmware.api [None req-99c1aae4-9379-4554-8864-91527361c589 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 1024.968808] env[69328]: value = "task-3273836" [ 1024.968808] env[69328]: _type = "Task" [ 1024.968808] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.978384] env[69328]: DEBUG oslo_vmware.api [None req-99c1aae4-9379-4554-8864-91527361c589 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273836, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.043727] env[69328]: DEBUG oslo_vmware.api [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273835, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.139219] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.498s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1025.139812] env[69328]: DEBUG nova.compute.manager [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1025.142747] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.298s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1025.144030] env[69328]: INFO nova.compute.claims [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1025.480621] env[69328]: DEBUG oslo_vmware.api [None req-99c1aae4-9379-4554-8864-91527361c589 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273836, 'name': ReconfigVM_Task, 'duration_secs': 0.307235} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.481048] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-99c1aae4-9379-4554-8864-91527361c589 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653876', 'volume_id': 'a39fd325-7c9d-4482-b7a4-43b28bf52e5c', 'name': 'volume-a39fd325-7c9d-4482-b7a4-43b28bf52e5c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3ba646e8-a5c8-4917-a1c4-32b37affb598', 'attached_at': '', 'detached_at': '', 'volume_id': 'a39fd325-7c9d-4482-b7a4-43b28bf52e5c', 'serial': 'a39fd325-7c9d-4482-b7a4-43b28bf52e5c'} {{(pid=69328) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1025.545038] env[69328]: DEBUG oslo_vmware.api [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273835, 'name': PowerOnVM_Task, 'duration_secs': 0.840429} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.545363] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1025.545588] env[69328]: INFO nova.compute.manager [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Took 7.93 seconds to spawn the instance on the hypervisor. [ 1025.545793] env[69328]: DEBUG nova.compute.manager [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1025.546697] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dad23849-8d3c-443d-8f29-37553090178d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.631160] env[69328]: DEBUG oslo_concurrency.lockutils [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "071c1837-9d0b-4b69-b16e-991b300385fb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1025.631528] env[69328]: DEBUG oslo_concurrency.lockutils [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "071c1837-9d0b-4b69-b16e-991b300385fb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1025.647919] env[69328]: DEBUG nova.compute.utils [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1025.652955] env[69328]: DEBUG nova.compute.manager [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1025.653235] env[69328]: DEBUG nova.network.neutron [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1025.708931] env[69328]: DEBUG nova.policy [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '43be625728f24af5a2f6a650279d689d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cdc479a290524130b9d17e627a64b65a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 1026.041121] env[69328]: DEBUG nova.network.neutron [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Successfully created port: e957681a-e4bc-4b9a-b2b7-a4783ae059b8 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1026.044215] env[69328]: DEBUG nova.objects.instance [None req-99c1aae4-9379-4554-8864-91527361c589 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lazy-loading 'flavor' on Instance uuid 3ba646e8-a5c8-4917-a1c4-32b37affb598 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1026.068143] env[69328]: INFO nova.compute.manager [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Took 26.74 seconds to build instance. [ 1026.134168] env[69328]: DEBUG nova.compute.manager [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1026.151524] env[69328]: DEBUG nova.compute.manager [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Start building block device mappings for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1026.545569] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ac605b4-839c-46f8-aef5-fd6e4faec69c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.555641] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe5377f8-ba27-41c1-81a2-af172860467a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.589691] env[69328]: DEBUG oslo_concurrency.lockutils [None req-fa7f4eb7-0416-4832-a964-9588a3e1a28e tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Lock "19f537b7-90fc-4832-b137-e042e00a508b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.265s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1026.590783] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-451a32f2-a1c0-47cc-85ab-4a2e69d2c7e4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.601245] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-463574af-5ba1-4752-8f3b-481ce5585ad8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.617580] env[69328]: DEBUG nova.compute.provider_tree [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1026.652603] env[69328]: DEBUG oslo_concurrency.lockutils [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1027.055649] env[69328]: DEBUG oslo_concurrency.lockutils [None req-99c1aae4-9379-4554-8864-91527361c589 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "3ba646e8-a5c8-4917-a1c4-32b37affb598" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.275s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1027.121422] env[69328]: DEBUG nova.scheduler.client.report [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1027.161782] env[69328]: DEBUG nova.compute.manager [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1027.186734] env[69328]: DEBUG nova.virt.hardware [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1027.186734] env[69328]: DEBUG nova.virt.hardware [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1027.186734] env[69328]: DEBUG nova.virt.hardware [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1027.186893] env[69328]: DEBUG nova.virt.hardware [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1027.187009] env[69328]: DEBUG nova.virt.hardware [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1027.187168] env[69328]: DEBUG nova.virt.hardware [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1027.187381] env[69328]: DEBUG nova.virt.hardware [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 1027.187540] env[69328]: DEBUG nova.virt.hardware [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1027.187707] env[69328]: DEBUG nova.virt.hardware [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1027.187868] env[69328]: DEBUG nova.virt.hardware [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1027.188060] env[69328]: DEBUG nova.virt.hardware [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1027.188923] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4470df0-39a9-4d8c-8ed1-242de08ed3cb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.199733] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e3feba6-91ef-46f4-83cb-eb2d2436c4f1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.232146] env[69328]: DEBUG oslo_concurrency.lockutils [None req-81ca0695-bba3-49b9-ac61-689ae87a5224 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "3ba646e8-a5c8-4917-a1c4-32b37affb598" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1027.232429] env[69328]: DEBUG oslo_concurrency.lockutils [None req-81ca0695-bba3-49b9-ac61-689ae87a5224 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "3ba646e8-a5c8-4917-a1c4-32b37affb598" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1027.232995] env[69328]: DEBUG oslo_concurrency.lockutils [None req-81ca0695-bba3-49b9-ac61-689ae87a5224 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "3ba646e8-a5c8-4917-a1c4-32b37affb598-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1027.232995] env[69328]: DEBUG oslo_concurrency.lockutils [None req-81ca0695-bba3-49b9-ac61-689ae87a5224 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock 
"3ba646e8-a5c8-4917-a1c4-32b37affb598-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1027.232995] env[69328]: DEBUG oslo_concurrency.lockutils [None req-81ca0695-bba3-49b9-ac61-689ae87a5224 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "3ba646e8-a5c8-4917-a1c4-32b37affb598-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1027.235071] env[69328]: INFO nova.compute.manager [None req-81ca0695-bba3-49b9-ac61-689ae87a5224 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Terminating instance [ 1027.578878] env[69328]: DEBUG nova.compute.manager [req-1252956e-4d75-4d73-8c30-7a6d466c9320 req-9ea6edfe-223b-42d0-a3fd-b7b68d57081e service nova] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Received event network-vif-plugged-e957681a-e4bc-4b9a-b2b7-a4783ae059b8 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1027.579106] env[69328]: DEBUG oslo_concurrency.lockutils [req-1252956e-4d75-4d73-8c30-7a6d466c9320 req-9ea6edfe-223b-42d0-a3fd-b7b68d57081e service nova] Acquiring lock "204286d7-c806-48cb-85e9-b2a78571777c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1027.579316] env[69328]: DEBUG oslo_concurrency.lockutils [req-1252956e-4d75-4d73-8c30-7a6d466c9320 req-9ea6edfe-223b-42d0-a3fd-b7b68d57081e service nova] Lock "204286d7-c806-48cb-85e9-b2a78571777c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1027.579506] env[69328]: DEBUG oslo_concurrency.lockutils [req-1252956e-4d75-4d73-8c30-7a6d466c9320 req-9ea6edfe-223b-42d0-a3fd-b7b68d57081e service nova] Lock "204286d7-c806-48cb-85e9-b2a78571777c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1027.579634] env[69328]: DEBUG nova.compute.manager [req-1252956e-4d75-4d73-8c30-7a6d466c9320 req-9ea6edfe-223b-42d0-a3fd-b7b68d57081e service nova] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] No waiting events found dispatching network-vif-plugged-e957681a-e4bc-4b9a-b2b7-a4783ae059b8 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1027.579791] env[69328]: WARNING nova.compute.manager [req-1252956e-4d75-4d73-8c30-7a6d466c9320 req-9ea6edfe-223b-42d0-a3fd-b7b68d57081e service nova] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Received unexpected event network-vif-plugged-e957681a-e4bc-4b9a-b2b7-a4783ae059b8 for instance with vm_state building and task_state spawning. 
[ 1027.626385] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.484s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1027.626884] env[69328]: DEBUG nova.compute.manager [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1027.629387] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7b0be2c6-034d-44a2-9992-d33f2e03ccd0 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.058s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1027.629634] env[69328]: DEBUG nova.objects.instance [None req-7b0be2c6-034d-44a2-9992-d33f2e03ccd0 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Lazy-loading 'resources' on Instance uuid 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1027.671019] env[69328]: DEBUG nova.network.neutron [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Successfully updated port: e957681a-e4bc-4b9a-b2b7-a4783ae059b8 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1027.738366] env[69328]: DEBUG nova.compute.manager [None req-81ca0695-bba3-49b9-ac61-689ae87a5224 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1027.738538] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-81ca0695-bba3-49b9-ac61-689ae87a5224 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1027.739451] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbe06197-b96c-4197-a838-c6554869db8b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.743912] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9235f748-0f4c-40f7-834f-2e27c2f2019f tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Acquiring lock "dd43adb3-b073-483a-81dd-69df7f746874" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1027.744147] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9235f748-0f4c-40f7-834f-2e27c2f2019f tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Lock "dd43adb3-b073-483a-81dd-69df7f746874" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1027.744323] env[69328]: DEBUG nova.compute.manager [None req-9235f748-0f4c-40f7-834f-2e27c2f2019f tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1027.745069] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-718dc1b4-dd0c-4ce6-a9eb-0c264569ce7d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.749763] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-81ca0695-bba3-49b9-ac61-689ae87a5224 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1027.750611] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-49b41e8e-e9c7-4c9f-bb30-82c1a46f4d9c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.753367] env[69328]: DEBUG nova.compute.manager [None req-9235f748-0f4c-40f7-834f-2e27c2f2019f tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69328) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1027.753910] env[69328]: DEBUG nova.objects.instance [None req-9235f748-0f4c-40f7-834f-2e27c2f2019f tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Lazy-loading 'flavor' on 
Instance uuid dd43adb3-b073-483a-81dd-69df7f746874 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1027.761213] env[69328]: DEBUG oslo_vmware.api [None req-81ca0695-bba3-49b9-ac61-689ae87a5224 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 1027.761213] env[69328]: value = "task-3273838" [ 1027.761213] env[69328]: _type = "Task" [ 1027.761213] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.770125] env[69328]: DEBUG oslo_vmware.api [None req-81ca0695-bba3-49b9-ac61-689ae87a5224 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273838, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.132692] env[69328]: DEBUG nova.compute.utils [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1028.136817] env[69328]: DEBUG nova.compute.manager [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1028.136911] env[69328]: DEBUG nova.network.neutron [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1028.173739] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "refresh_cache-204286d7-c806-48cb-85e9-b2a78571777c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.175181] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquired lock "refresh_cache-204286d7-c806-48cb-85e9-b2a78571777c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1028.175181] env[69328]: DEBUG nova.network.neutron [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1028.186185] env[69328]: DEBUG nova.policy [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0ca24c1b09374feeaec13dfeeaf02d94', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 
'6bad0df17bba4bc996fe5cf1faf23fad', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 1028.274996] env[69328]: DEBUG oslo_vmware.api [None req-81ca0695-bba3-49b9-ac61-689ae87a5224 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273838, 'name': PowerOffVM_Task, 'duration_secs': 0.321736} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.275140] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-81ca0695-bba3-49b9-ac61-689ae87a5224 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1028.275277] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-81ca0695-bba3-49b9-ac61-689ae87a5224 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1028.275777] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c3aaea75-1258-49bd-99f4-a46676f905ea {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.349119] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-81ca0695-bba3-49b9-ac61-689ae87a5224 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1028.349119] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-81ca0695-bba3-49b9-ac61-689ae87a5224 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1028.349119] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-81ca0695-bba3-49b9-ac61-689ae87a5224 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Deleting the datastore file [datastore2] 3ba646e8-a5c8-4917-a1c4-32b37affb598 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1028.349119] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a32876ef-e089-4db9-baa4-9d065536ffb9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.364021] env[69328]: DEBUG oslo_vmware.api [None req-81ca0695-bba3-49b9-ac61-689ae87a5224 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 1028.364021] env[69328]: value = "task-3273840" [ 1028.364021] env[69328]: _type = "Task" [ 1028.364021] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.370795] env[69328]: DEBUG oslo_vmware.api [None req-81ca0695-bba3-49b9-ac61-689ae87a5224 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273840, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.529273] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2052eb5b-d32b-4450-89c5-35b670c0da80 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.533038] env[69328]: DEBUG nova.network.neutron [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Successfully created port: b90c50eb-decb-4850-8c7e-af0b3b67eaf0 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1028.539881] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ac21246-ff6b-4709-8097-343206e2ffe8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.570921] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5be5185a-7fee-4365-b474-d67f8b65a8f5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.579379] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c863edbc-f0c2-4142-8d71-42bdd3d20352 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.593393] env[69328]: DEBUG nova.compute.provider_tree [None req-7b0be2c6-034d-44a2-9992-d33f2e03ccd0 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1028.637825] env[69328]: DEBUG nova.compute.manager [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1028.710146] env[69328]: DEBUG nova.network.neutron [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1028.762648] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-9235f748-0f4c-40f7-834f-2e27c2f2019f tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1028.762648] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-759f7862-a558-4a42-8441-7952a7e4f7ad {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.771297] env[69328]: DEBUG oslo_vmware.api [None req-9235f748-0f4c-40f7-834f-2e27c2f2019f tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Waiting for the task: (returnval){ [ 1028.771297] env[69328]: value = "task-3273841" [ 1028.771297] env[69328]: _type = "Task" [ 1028.771297] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.782459] env[69328]: DEBUG oslo_vmware.api [None req-9235f748-0f4c-40f7-834f-2e27c2f2019f tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273841, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.845354] env[69328]: DEBUG nova.network.neutron [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Updating instance_info_cache with network_info: [{"id": "e957681a-e4bc-4b9a-b2b7-a4783ae059b8", "address": "fa:16:3e:be:0a:24", "network": {"id": "620f277d-ca49-41da-83a0-afb8c393e26e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1223326239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdc479a290524130b9d17e627a64b65a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape957681a-e4", "ovs_interfaceid": "e957681a-e4bc-4b9a-b2b7-a4783ae059b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1028.872218] env[69328]: DEBUG oslo_vmware.api [None req-81ca0695-bba3-49b9-ac61-689ae87a5224 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273840, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161715} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.872479] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-81ca0695-bba3-49b9-ac61-689ae87a5224 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1028.872671] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-81ca0695-bba3-49b9-ac61-689ae87a5224 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1028.872869] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-81ca0695-bba3-49b9-ac61-689ae87a5224 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1028.873056] env[69328]: INFO nova.compute.manager [None req-81ca0695-bba3-49b9-ac61-689ae87a5224 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1028.873338] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-81ca0695-bba3-49b9-ac61-689ae87a5224 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1028.873542] env[69328]: DEBUG nova.compute.manager [-] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1028.873634] env[69328]: DEBUG nova.network.neutron [-] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1028.924199] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1029.096481] env[69328]: DEBUG nova.scheduler.client.report [None req-7b0be2c6-034d-44a2-9992-d33f2e03ccd0 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1029.281740] env[69328]: DEBUG oslo_vmware.api [None req-9235f748-0f4c-40f7-834f-2e27c2f2019f tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273841, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.348058] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Releasing lock "refresh_cache-204286d7-c806-48cb-85e9-b2a78571777c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1029.348421] env[69328]: DEBUG nova.compute.manager [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Instance network_info: |[{"id": "e957681a-e4bc-4b9a-b2b7-a4783ae059b8", "address": "fa:16:3e:be:0a:24", "network": {"id": "620f277d-ca49-41da-83a0-afb8c393e26e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1223326239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdc479a290524130b9d17e627a64b65a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape957681a-e4", "ovs_interfaceid": "e957681a-e4bc-4b9a-b2b7-a4783ae059b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1029.348869] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:be:0a:24', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '86a35d07-53d3-46b3-92cb-ae34236c0f41', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e957681a-e4bc-4b9a-b2b7-a4783ae059b8', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1029.358117] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1029.358383] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1029.358625] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4c6c2f4c-90dc-4e86-ac23-8046d4cfa9a8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.379432] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1029.379432] env[69328]: value = "task-3273842" [ 1029.379432] env[69328]: _type = "Task" [ 1029.379432] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.388250] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273842, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.414096] env[69328]: DEBUG nova.compute.manager [req-f893d5de-b5b0-4f2f-bc2f-984afb11888f req-bdc3f530-712b-4994-9dee-f7266ca7d578 service nova] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Received event network-vif-deleted-6fa9c0fb-f285-4d44-8824-09041fd2f8f6 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1029.414250] env[69328]: INFO nova.compute.manager [req-f893d5de-b5b0-4f2f-bc2f-984afb11888f req-bdc3f530-712b-4994-9dee-f7266ca7d578 service nova] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Neutron deleted interface 6fa9c0fb-f285-4d44-8824-09041fd2f8f6; detaching it from the instance and deleting it from the info cache [ 1029.414423] env[69328]: DEBUG nova.network.neutron [req-f893d5de-b5b0-4f2f-bc2f-984afb11888f req-bdc3f530-712b-4994-9dee-f7266ca7d578 service nova] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.606132] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7b0be2c6-034d-44a2-9992-d33f2e03ccd0 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.974s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1029.607518] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.436s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1029.610028] env[69328]: INFO nova.compute.claims [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1029.614629] env[69328]: DEBUG nova.compute.manager [req-4d355841-eba8-4d68-ae35-96977e1d4b2e req-9eaa1930-bcc5-4bf5-979d-1726cecb1bb7 service nova] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Received 
event network-changed-e957681a-e4bc-4b9a-b2b7-a4783ae059b8 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1029.615065] env[69328]: DEBUG nova.compute.manager [req-4d355841-eba8-4d68-ae35-96977e1d4b2e req-9eaa1930-bcc5-4bf5-979d-1726cecb1bb7 service nova] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Refreshing instance network info cache due to event network-changed-e957681a-e4bc-4b9a-b2b7-a4783ae059b8. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1029.615506] env[69328]: DEBUG oslo_concurrency.lockutils [req-4d355841-eba8-4d68-ae35-96977e1d4b2e req-9eaa1930-bcc5-4bf5-979d-1726cecb1bb7 service nova] Acquiring lock "refresh_cache-204286d7-c806-48cb-85e9-b2a78571777c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1029.615843] env[69328]: DEBUG oslo_concurrency.lockutils [req-4d355841-eba8-4d68-ae35-96977e1d4b2e req-9eaa1930-bcc5-4bf5-979d-1726cecb1bb7 service nova] Acquired lock "refresh_cache-204286d7-c806-48cb-85e9-b2a78571777c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1029.617761] env[69328]: DEBUG nova.network.neutron [req-4d355841-eba8-4d68-ae35-96977e1d4b2e req-9eaa1930-bcc5-4bf5-979d-1726cecb1bb7 service nova] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Refreshing network info cache for port e957681a-e4bc-4b9a-b2b7-a4783ae059b8 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1029.630165] env[69328]: INFO nova.scheduler.client.report [None req-7b0be2c6-034d-44a2-9992-d33f2e03ccd0 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Deleted allocations for instance 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3 [ 1029.648435] env[69328]: DEBUG nova.compute.manager [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1029.677173] env[69328]: DEBUG nova.virt.hardware [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1029.677865] env[69328]: DEBUG nova.virt.hardware [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1029.677865] env[69328]: DEBUG nova.virt.hardware [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1029.677865] env[69328]: DEBUG nova.virt.hardware [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1029.677977] env[69328]: DEBUG nova.virt.hardware [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1029.678188] env[69328]: DEBUG nova.virt.hardware [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1029.678430] env[69328]: DEBUG nova.virt.hardware [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1029.678611] env[69328]: DEBUG nova.virt.hardware [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1029.678801] env[69328]: DEBUG nova.virt.hardware [None 
req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1029.678993] env[69328]: DEBUG nova.virt.hardware [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1029.679397] env[69328]: DEBUG nova.virt.hardware [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1029.680188] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a29376ad-736c-47e9-b6aa-ae894d2b9f33 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.693216] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcd4b21c-0b05-4529-8c10-49a6aac7394b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.781680] env[69328]: DEBUG oslo_vmware.api [None req-9235f748-0f4c-40f7-834f-2e27c2f2019f tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273841, 'name': PowerOffVM_Task, 'duration_secs': 0.536128} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.782047] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-9235f748-0f4c-40f7-834f-2e27c2f2019f tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1029.782238] env[69328]: DEBUG nova.compute.manager [None req-9235f748-0f4c-40f7-834f-2e27c2f2019f tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1029.783465] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c38d8a8-c04b-4480-90a8-5e480c1a28d8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.890563] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273842, 'name': CreateVM_Task, 'duration_secs': 0.36308} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.890767] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1029.891498] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1029.891655] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1029.892045] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1029.892330] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0ef90b7-180e-4c00-adeb-482a5ebbe98d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.897286] env[69328]: DEBUG nova.network.neutron [-] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.898466] env[69328]: DEBUG oslo_vmware.api [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 1029.898466] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c97845-fef5-3033-24f4-b56682a5f6fe" [ 1029.898466] env[69328]: _type = "Task" [ 1029.898466] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.909255] env[69328]: DEBUG oslo_vmware.api [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c97845-fef5-3033-24f4-b56682a5f6fe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.916502] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6d85cb3d-e852-4abe-a9b2-ef5052badfaf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.927205] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ffd6ccd-9e33-4c6d-b2a9-64fdb7fa67d3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.968083] env[69328]: DEBUG nova.compute.manager [req-f893d5de-b5b0-4f2f-bc2f-984afb11888f req-bdc3f530-712b-4994-9dee-f7266ca7d578 service nova] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Detach interface failed, port_id=6fa9c0fb-f285-4d44-8824-09041fd2f8f6, reason: Instance 3ba646e8-a5c8-4917-a1c4-32b37affb598 could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1030.097432] env[69328]: DEBUG nova.network.neutron [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Successfully updated port: b90c50eb-decb-4850-8c7e-af0b3b67eaf0 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1030.139636] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7b0be2c6-034d-44a2-9992-d33f2e03ccd0 tempest-ServersWithSpecificFlavorTestJSON-1760735864 tempest-ServersWithSpecificFlavorTestJSON-1760735864-project-member] Lock "3b4b6687-fb6d-4bb7-8604-20a3ba706ff3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.760s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1030.299290] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9235f748-0f4c-40f7-834f-2e27c2f2019f tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Lock "dd43adb3-b073-483a-81dd-69df7f746874" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.553s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1030.331884] env[69328]: DEBUG nova.network.neutron [req-4d355841-eba8-4d68-ae35-96977e1d4b2e req-9eaa1930-bcc5-4bf5-979d-1726cecb1bb7 service nova] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Updated VIF entry in instance network info cache for port e957681a-e4bc-4b9a-b2b7-a4783ae059b8. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1030.332324] env[69328]: DEBUG nova.network.neutron [req-4d355841-eba8-4d68-ae35-96977e1d4b2e req-9eaa1930-bcc5-4bf5-979d-1726cecb1bb7 service nova] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Updating instance_info_cache with network_info: [{"id": "e957681a-e4bc-4b9a-b2b7-a4783ae059b8", "address": "fa:16:3e:be:0a:24", "network": {"id": "620f277d-ca49-41da-83a0-afb8c393e26e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1223326239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdc479a290524130b9d17e627a64b65a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape957681a-e4", "ovs_interfaceid": "e957681a-e4bc-4b9a-b2b7-a4783ae059b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1030.400717] env[69328]: INFO nova.compute.manager [-] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Took 1.53 seconds to deallocate network for instance. [ 1030.417043] env[69328]: DEBUG oslo_vmware.api [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c97845-fef5-3033-24f4-b56682a5f6fe, 'name': SearchDatastore_Task, 'duration_secs': 0.011583} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.417043] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1030.417043] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1030.417043] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.417043] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1030.417043] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1030.417267] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-75dd5b69-1941-47e0-a95e-5e6ad1f9779b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.426592] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1030.426763] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1030.427490] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95a94161-635f-4147-87d9-e4bd283d7c8d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.433620] env[69328]: DEBUG oslo_vmware.api [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 1030.433620] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5234c2e2-f0f3-b9db-93d7-8d19d07e1d6f" [ 1030.433620] env[69328]: _type = "Task" [ 1030.433620] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.443493] env[69328]: DEBUG oslo_vmware.api [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5234c2e2-f0f3-b9db-93d7-8d19d07e1d6f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.601801] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquiring lock "refresh_cache-c7321021-15ea-47f4-a8ca-1045f2966394" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.606107] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquired lock "refresh_cache-c7321021-15ea-47f4-a8ca-1045f2966394" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1030.606107] env[69328]: DEBUG nova.network.neutron [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1030.834922] env[69328]: DEBUG oslo_concurrency.lockutils [req-4d355841-eba8-4d68-ae35-96977e1d4b2e req-9eaa1930-bcc5-4bf5-979d-1726cecb1bb7 service nova] Releasing lock "refresh_cache-204286d7-c806-48cb-85e9-b2a78571777c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1030.910672] env[69328]: DEBUG oslo_concurrency.lockutils [None req-81ca0695-bba3-49b9-ac61-689ae87a5224 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1030.943872] env[69328]: DEBUG oslo_vmware.api [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5234c2e2-f0f3-b9db-93d7-8d19d07e1d6f, 'name': SearchDatastore_Task, 'duration_secs': 0.011542} completed 
successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.947034] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-333625a2-d8f0-4152-87c2-6ead4d55fc20 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.952234] env[69328]: DEBUG oslo_vmware.api [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 1030.952234] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d0f872-a699-daa9-241e-20422bb6c458" [ 1030.952234] env[69328]: _type = "Task" [ 1030.952234] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.961285] env[69328]: DEBUG oslo_vmware.api [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d0f872-a699-daa9-241e-20422bb6c458, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.994419] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db92fdf9-37c8-4013-b64c-39392943b971 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.002258] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f87bc99-2b66-4843-b541-131bcb7d106b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.040673] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3af5acf1-00b1-4269-bb09-be2e6bc1ca9f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.050675] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e47e8eb-ca9a-4f1e-88ea-0443019ac221 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.066972] env[69328]: DEBUG nova.compute.provider_tree [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1031.153771] env[69328]: DEBUG nova.network.neutron [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1031.334246] env[69328]: DEBUG nova.network.neutron [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Updating instance_info_cache with network_info: [{"id": "b90c50eb-decb-4850-8c7e-af0b3b67eaf0", "address": "fa:16:3e:92:1a:33", "network": {"id": "77f17547-8c62-4a31-9840-8d2cbb1bfbab", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-163692077-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bad0df17bba4bc996fe5cf1faf23fad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb90c50eb-de", "ovs_interfaceid": "b90c50eb-decb-4850-8c7e-af0b3b67eaf0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1031.465257] env[69328]: DEBUG oslo_vmware.api [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d0f872-a699-daa9-241e-20422bb6c458, 'name': SearchDatastore_Task, 'duration_secs': 0.009807} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.465574] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1031.465875] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 204286d7-c806-48cb-85e9-b2a78571777c/204286d7-c806-48cb-85e9-b2a78571777c.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1031.466159] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2d0b3539-8d61-4db8-b75b-6406c7ecad80 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.473971] env[69328]: DEBUG oslo_vmware.api [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 1031.473971] env[69328]: value = "task-3273843" [ 1031.473971] env[69328]: _type = "Task" [ 1031.473971] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.482860] env[69328]: DEBUG oslo_vmware.api [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273843, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.509888] env[69328]: DEBUG nova.objects.instance [None req-037d04d2-d1dd-4a35-8a91-516ee31a37ac tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Lazy-loading 'flavor' on Instance uuid dd43adb3-b073-483a-81dd-69df7f746874 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1031.575600] env[69328]: DEBUG nova.scheduler.client.report [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1031.649449] env[69328]: DEBUG nova.compute.manager [req-16755985-6c31-4bac-a658-1d4b410d7da1 req-477808b8-dadd-4ab7-aafa-899a33e5a1f4 service nova] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Received event network-vif-plugged-b90c50eb-decb-4850-8c7e-af0b3b67eaf0 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1031.649678] env[69328]: DEBUG oslo_concurrency.lockutils [req-16755985-6c31-4bac-a658-1d4b410d7da1 req-477808b8-dadd-4ab7-aafa-899a33e5a1f4 service nova] Acquiring lock "c7321021-15ea-47f4-a8ca-1045f2966394-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1031.649893] env[69328]: DEBUG oslo_concurrency.lockutils [req-16755985-6c31-4bac-a658-1d4b410d7da1 req-477808b8-dadd-4ab7-aafa-899a33e5a1f4 service nova] Lock "c7321021-15ea-47f4-a8ca-1045f2966394-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1031.651292] env[69328]: DEBUG oslo_concurrency.lockutils [req-16755985-6c31-4bac-a658-1d4b410d7da1 req-477808b8-dadd-4ab7-aafa-899a33e5a1f4 service nova] Lock "c7321021-15ea-47f4-a8ca-1045f2966394-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1031.651589] env[69328]: DEBUG nova.compute.manager [req-16755985-6c31-4bac-a658-1d4b410d7da1 req-477808b8-dadd-4ab7-aafa-899a33e5a1f4 service nova] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] No waiting events found dispatching network-vif-plugged-b90c50eb-decb-4850-8c7e-af0b3b67eaf0 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1031.651813] env[69328]: WARNING nova.compute.manager [req-16755985-6c31-4bac-a658-1d4b410d7da1 req-477808b8-dadd-4ab7-aafa-899a33e5a1f4 service nova] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Received unexpected event network-vif-plugged-b90c50eb-decb-4850-8c7e-af0b3b67eaf0 for instance with vm_state building and task_state spawning. 
[ 1031.652096] env[69328]: DEBUG nova.compute.manager [req-16755985-6c31-4bac-a658-1d4b410d7da1 req-477808b8-dadd-4ab7-aafa-899a33e5a1f4 service nova] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Received event network-changed-b90c50eb-decb-4850-8c7e-af0b3b67eaf0 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1031.652495] env[69328]: DEBUG nova.compute.manager [req-16755985-6c31-4bac-a658-1d4b410d7da1 req-477808b8-dadd-4ab7-aafa-899a33e5a1f4 service nova] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Refreshing instance network info cache due to event network-changed-b90c50eb-decb-4850-8c7e-af0b3b67eaf0. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1031.652549] env[69328]: DEBUG oslo_concurrency.lockutils [req-16755985-6c31-4bac-a658-1d4b410d7da1 req-477808b8-dadd-4ab7-aafa-899a33e5a1f4 service nova] Acquiring lock "refresh_cache-c7321021-15ea-47f4-a8ca-1045f2966394" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.690218] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1031.690527] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1031.837381] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Releasing lock "refresh_cache-c7321021-15ea-47f4-a8ca-1045f2966394" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1031.837847] env[69328]: DEBUG nova.compute.manager [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Instance network_info: |[{"id": "b90c50eb-decb-4850-8c7e-af0b3b67eaf0", "address": "fa:16:3e:92:1a:33", "network": {"id": "77f17547-8c62-4a31-9840-8d2cbb1bfbab", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-163692077-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bad0df17bba4bc996fe5cf1faf23fad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 
295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb90c50eb-de", "ovs_interfaceid": "b90c50eb-decb-4850-8c7e-af0b3b67eaf0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1031.838337] env[69328]: DEBUG oslo_concurrency.lockutils [req-16755985-6c31-4bac-a658-1d4b410d7da1 req-477808b8-dadd-4ab7-aafa-899a33e5a1f4 service nova] Acquired lock "refresh_cache-c7321021-15ea-47f4-a8ca-1045f2966394" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1031.838595] env[69328]: DEBUG nova.network.neutron [req-16755985-6c31-4bac-a658-1d4b410d7da1 req-477808b8-dadd-4ab7-aafa-899a33e5a1f4 service nova] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Refreshing network info cache for port b90c50eb-decb-4850-8c7e-af0b3b67eaf0 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1031.840280] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:92:1a:33', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '62237242-7ce2-4664-a1c5-6783b516b507', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b90c50eb-decb-4850-8c7e-af0b3b67eaf0', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1031.849023] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1031.850410] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1031.850653] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-79233f5a-6dcb-4ddb-a7d8-b791053b2e81 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.874403] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1031.874403] env[69328]: value = "task-3273844" [ 1031.874403] env[69328]: _type = "Task" [ 1031.874403] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.886769] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273844, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.989078] env[69328]: DEBUG oslo_vmware.api [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273843, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.014890] env[69328]: DEBUG oslo_concurrency.lockutils [None req-037d04d2-d1dd-4a35-8a91-516ee31a37ac tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Acquiring lock "refresh_cache-dd43adb3-b073-483a-81dd-69df7f746874" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.015169] env[69328]: DEBUG oslo_concurrency.lockutils [None req-037d04d2-d1dd-4a35-8a91-516ee31a37ac tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Acquired lock "refresh_cache-dd43adb3-b073-483a-81dd-69df7f746874" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1032.015500] env[69328]: DEBUG nova.network.neutron [None req-037d04d2-d1dd-4a35-8a91-516ee31a37ac tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1032.015668] env[69328]: DEBUG nova.objects.instance [None req-037d04d2-d1dd-4a35-8a91-516ee31a37ac tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Lazy-loading 'info_cache' on Instance uuid dd43adb3-b073-483a-81dd-69df7f746874 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1032.083988] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.474s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1032.083988] env[69328]: DEBUG nova.compute.manager [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1032.085989] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.631s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1032.087213] env[69328]: INFO nova.compute.claims [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1032.196602] env[69328]: DEBUG nova.compute.manager [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Starting instance... 
{{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1032.388904] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273844, 'name': CreateVM_Task} progress is 25%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.487043] env[69328]: DEBUG oslo_vmware.api [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273843, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.524385} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.487471] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 204286d7-c806-48cb-85e9-b2a78571777c/204286d7-c806-48cb-85e9-b2a78571777c.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1032.487823] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1032.488202] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e30bc183-35b1-4c45-b4e0-5b992156db30 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.499922] env[69328]: DEBUG oslo_vmware.api [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 1032.499922] env[69328]: value = "task-3273845" [ 1032.499922] env[69328]: _type = "Task" [ 1032.499922] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.511142] env[69328]: DEBUG oslo_vmware.api [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273845, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.519775] env[69328]: DEBUG nova.objects.base [None req-037d04d2-d1dd-4a35-8a91-516ee31a37ac tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=69328) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1032.591578] env[69328]: DEBUG nova.compute.utils [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1032.596694] env[69328]: DEBUG nova.compute.manager [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1032.597121] env[69328]: DEBUG nova.network.neutron [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1032.690195] env[69328]: DEBUG nova.policy [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '19265c910cd04814978013416bf2a18a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '636412f89c9d488a9cfd6f19ef046efc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 1032.725162] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1032.864108] env[69328]: DEBUG nova.network.neutron [req-16755985-6c31-4bac-a658-1d4b410d7da1 req-477808b8-dadd-4ab7-aafa-899a33e5a1f4 service nova] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Updated VIF entry in instance network info cache for port b90c50eb-decb-4850-8c7e-af0b3b67eaf0. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1032.864108] env[69328]: DEBUG nova.network.neutron [req-16755985-6c31-4bac-a658-1d4b410d7da1 req-477808b8-dadd-4ab7-aafa-899a33e5a1f4 service nova] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Updating instance_info_cache with network_info: [{"id": "b90c50eb-decb-4850-8c7e-af0b3b67eaf0", "address": "fa:16:3e:92:1a:33", "network": {"id": "77f17547-8c62-4a31-9840-8d2cbb1bfbab", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-163692077-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bad0df17bba4bc996fe5cf1faf23fad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb90c50eb-de", "ovs_interfaceid": "b90c50eb-decb-4850-8c7e-af0b3b67eaf0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1032.889021] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273844, 'name': CreateVM_Task, 'duration_secs': 0.791039} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.889021] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1032.889021] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.889021] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1032.889021] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1032.889021] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43e5f613-83d6-4030-85c5-98179c1bb279 {{(pid=69328) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.893125] env[69328]: DEBUG oslo_vmware.api [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 1032.893125] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b92f1c-8953-799f-3ed4-80b069a8862f" [ 1032.893125] env[69328]: _type = "Task" [ 1032.893125] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.901842] env[69328]: DEBUG oslo_vmware.api [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b92f1c-8953-799f-3ed4-80b069a8862f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.012047] env[69328]: DEBUG oslo_vmware.api [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273845, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076371} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.012047] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1033.013925] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62874b30-1af9-417d-9b59-45b8c90be0d1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.040649] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Reconfiguring VM instance instance-00000060 to attach disk [datastore2] 204286d7-c806-48cb-85e9-b2a78571777c/204286d7-c806-48cb-85e9-b2a78571777c.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1033.041422] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6fd45fe0-308e-44ea-8c88-1dd840a18114 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.063932] env[69328]: DEBUG oslo_vmware.api [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 1033.063932] env[69328]: value = "task-3273846" [ 1033.063932] env[69328]: _type = "Task" [ 1033.063932] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.073485] env[69328]: DEBUG oslo_vmware.api [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273846, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.097750] env[69328]: DEBUG nova.compute.manager [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1033.109305] env[69328]: DEBUG nova.network.neutron [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Successfully created port: 92b25b0d-9caa-4faa-ae5a-417a735a03f7 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1033.368668] env[69328]: DEBUG oslo_concurrency.lockutils [req-16755985-6c31-4bac-a658-1d4b410d7da1 req-477808b8-dadd-4ab7-aafa-899a33e5a1f4 service nova] Releasing lock "refresh_cache-c7321021-15ea-47f4-a8ca-1045f2966394" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1033.409952] env[69328]: DEBUG oslo_vmware.api [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b92f1c-8953-799f-3ed4-80b069a8862f, 'name': SearchDatastore_Task, 'duration_secs': 0.010571} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.410435] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1033.410536] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1033.410778] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.410924] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1033.411120] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1033.411723] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fe6471a2-0348-44ad-9ad9-9e854d3761d2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.424531] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1033.424675] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1033.425513] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbe90007-85b3-4a67-b8bd-37408fba6d1a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.433228] env[69328]: DEBUG oslo_vmware.api [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 1033.433228] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]527ff14c-ba19-a7e3-7024-02f7af90af3f" [ 1033.433228] env[69328]: _type = "Task" [ 1033.433228] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.443672] env[69328]: DEBUG oslo_vmware.api [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]527ff14c-ba19-a7e3-7024-02f7af90af3f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.445233] env[69328]: DEBUG nova.network.neutron [None req-037d04d2-d1dd-4a35-8a91-516ee31a37ac tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Updating instance_info_cache with network_info: [{"id": "e0c14c41-b680-40a2-a769-2b4191814a41", "address": "fa:16:3e:1b:39:92", "network": {"id": "bd599844-1a77-4107-a344-7b4ffd53ea20", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1763799815-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f5cd4dfb0b54081aba7bf8620521193", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0c14c41-b6", "ovs_interfaceid": "e0c14c41-b680-40a2-a769-2b4191814a41", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1033.575950] env[69328]: DEBUG oslo_vmware.api [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273846, 'name': ReconfigVM_Task, 'duration_secs': 0.323292} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.581591] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Reconfigured VM instance instance-00000060 to attach disk [datastore2] 204286d7-c806-48cb-85e9-b2a78571777c/204286d7-c806-48cb-85e9-b2a78571777c.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1033.582671] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8d9d0056-b444-448b-95a4-54a4f6d47c39 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.592044] env[69328]: DEBUG oslo_vmware.api [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 1033.592044] env[69328]: value = "task-3273847" [ 1033.592044] env[69328]: _type = "Task" [ 1033.592044] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.603622] env[69328]: DEBUG oslo_vmware.api [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273847, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.608753] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e93ab630-3d56-456a-8f1c-e1be5e18fad1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.618818] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be37a554-9cc8-4ed3-8dd4-91b3a69acdb0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.657652] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b5cf5e6-0d73-452e-acf7-dab5a2b70856 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.666752] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8764868d-7bf3-4432-bc0d-94d7c3bccbfa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.683470] env[69328]: DEBUG nova.compute.provider_tree [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1033.944024] env[69328]: DEBUG oslo_vmware.api [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]527ff14c-ba19-a7e3-7024-02f7af90af3f, 'name': SearchDatastore_Task, 'duration_secs': 0.011735} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.945045] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d528005f-b2cb-45cd-b274-7bdbf2d752ac {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.951713] env[69328]: DEBUG oslo_concurrency.lockutils [None req-037d04d2-d1dd-4a35-8a91-516ee31a37ac tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Releasing lock "refresh_cache-dd43adb3-b073-483a-81dd-69df7f746874" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1033.951932] env[69328]: DEBUG oslo_vmware.api [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 1033.951932] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5248642c-4827-a3ef-a3bb-676e386b64cb" [ 1033.951932] env[69328]: _type = "Task" [ 1033.951932] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.966310] env[69328]: DEBUG oslo_vmware.api [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5248642c-4827-a3ef-a3bb-676e386b64cb, 'name': SearchDatastore_Task, 'duration_secs': 0.010593} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.966616] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1033.967496] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] c7321021-15ea-47f4-a8ca-1045f2966394/c7321021-15ea-47f4-a8ca-1045f2966394.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1033.968504] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-37b4c5ad-1d9b-4f74-b347-b08cf93f89a7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.981399] env[69328]: DEBUG oslo_vmware.api [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 1033.981399] env[69328]: value = "task-3273848" [ 1033.981399] env[69328]: _type = "Task" [ 1033.981399] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.994733] env[69328]: DEBUG oslo_vmware.api [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273848, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.105599] env[69328]: DEBUG oslo_vmware.api [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273847, 'name': Rename_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.113950] env[69328]: DEBUG nova.compute.manager [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1034.141294] env[69328]: DEBUG nova.virt.hardware [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1034.141547] env[69328]: DEBUG nova.virt.hardware [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1034.141702] env[69328]: DEBUG nova.virt.hardware [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1034.141883] env[69328]: DEBUG nova.virt.hardware [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1034.142051] env[69328]: DEBUG nova.virt.hardware [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1034.142266] env[69328]: DEBUG nova.virt.hardware [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 
tempest-ImagesTestJSON-1434614674-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1034.142497] env[69328]: DEBUG nova.virt.hardware [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1034.142659] env[69328]: DEBUG nova.virt.hardware [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1034.142827] env[69328]: DEBUG nova.virt.hardware [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1034.143027] env[69328]: DEBUG nova.virt.hardware [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1034.143247] env[69328]: DEBUG nova.virt.hardware [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1034.144112] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a9ad7be-842a-4147-89cf-b18b9ca97d25 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.152847] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a78a8ab1-f55a-412b-94e6-bf7d1343ca77 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.187642] env[69328]: DEBUG nova.scheduler.client.report [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1034.493198] env[69328]: DEBUG oslo_vmware.api [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273848, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.572865] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c1b9677b-6ae4-4ff8-b72c-b37efc8cb7ac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Acquiring lock "55d9ba65-e5c8-446a-a209-a840f30ff02c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.572865] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c1b9677b-6ae4-4ff8-b72c-b37efc8cb7ac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Lock "55d9ba65-e5c8-446a-a209-a840f30ff02c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1034.572865] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c1b9677b-6ae4-4ff8-b72c-b37efc8cb7ac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Acquiring lock "55d9ba65-e5c8-446a-a209-a840f30ff02c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.572865] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c1b9677b-6ae4-4ff8-b72c-b37efc8cb7ac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Lock "55d9ba65-e5c8-446a-a209-a840f30ff02c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1034.572865] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c1b9677b-6ae4-4ff8-b72c-b37efc8cb7ac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Lock "55d9ba65-e5c8-446a-a209-a840f30ff02c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1034.573675] env[69328]: INFO nova.compute.manager [None req-c1b9677b-6ae4-4ff8-b72c-b37efc8cb7ac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Terminating instance [ 1034.603132] env[69328]: DEBUG oslo_vmware.api [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273847, 'name': Rename_Task} progress is 99%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.692425] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.607s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1034.692952] env[69328]: DEBUG nova.compute.manager [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1034.695488] env[69328]: DEBUG oslo_concurrency.lockutils [None req-be70f0a8-60ab-4579-aab1-4d42d40bbc27 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.699s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1034.695704] env[69328]: DEBUG nova.objects.instance [None req-be70f0a8-60ab-4579-aab1-4d42d40bbc27 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lazy-loading 'resources' on Instance uuid b21ff3c9-d53a-4065-a271-682c2f1b895d {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1034.748241] env[69328]: DEBUG nova.compute.manager [req-caa5ee62-008a-4956-8180-a871653f611c req-2137369d-19c8-481e-b2dc-7f0db2812b77 service nova] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Received event network-vif-plugged-92b25b0d-9caa-4faa-ae5a-417a735a03f7 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1034.748471] env[69328]: DEBUG oslo_concurrency.lockutils [req-caa5ee62-008a-4956-8180-a871653f611c req-2137369d-19c8-481e-b2dc-7f0db2812b77 service nova] Acquiring lock "6ad357d9-c35a-4fdb-8dd0-39a0617bf85e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.748680] env[69328]: DEBUG oslo_concurrency.lockutils [req-caa5ee62-008a-4956-8180-a871653f611c req-2137369d-19c8-481e-b2dc-7f0db2812b77 service nova] Lock "6ad357d9-c35a-4fdb-8dd0-39a0617bf85e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1034.748848] env[69328]: DEBUG oslo_concurrency.lockutils [req-caa5ee62-008a-4956-8180-a871653f611c req-2137369d-19c8-481e-b2dc-7f0db2812b77 service nova] Lock "6ad357d9-c35a-4fdb-8dd0-39a0617bf85e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1034.749028] env[69328]: DEBUG nova.compute.manager [req-caa5ee62-008a-4956-8180-a871653f611c req-2137369d-19c8-481e-b2dc-7f0db2812b77 service nova] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] No waiting events found dispatching network-vif-plugged-92b25b0d-9caa-4faa-ae5a-417a735a03f7 {{(pid=69328) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 1034.749203] env[69328]: WARNING nova.compute.manager [req-caa5ee62-008a-4956-8180-a871653f611c req-2137369d-19c8-481e-b2dc-7f0db2812b77 service nova] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Received unexpected event network-vif-plugged-92b25b0d-9caa-4faa-ae5a-417a735a03f7 for instance with vm_state building and task_state spawning. [ 1034.961225] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-037d04d2-d1dd-4a35-8a91-516ee31a37ac tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1034.961559] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fda4d718-0827-475b-bd72-1109fef60e4f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.970878] env[69328]: DEBUG oslo_vmware.api [None req-037d04d2-d1dd-4a35-8a91-516ee31a37ac tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Waiting for the task: (returnval){ [ 1034.970878] env[69328]: value = "task-3273849" [ 1034.970878] env[69328]: _type = "Task" [ 1034.970878] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.982990] env[69328]: DEBUG oslo_vmware.api [None req-037d04d2-d1dd-4a35-8a91-516ee31a37ac tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273849, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.995862] env[69328]: DEBUG oslo_vmware.api [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273848, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.565348} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.996115] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] c7321021-15ea-47f4-a8ca-1045f2966394/c7321021-15ea-47f4-a8ca-1045f2966394.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1034.996327] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1034.996567] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-62934e84-eab3-47e3-a29b-345d906ca059 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.006056] env[69328]: DEBUG oslo_vmware.api [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 1035.006056] env[69328]: value = "task-3273850" [ 1035.006056] env[69328]: _type = "Task" [ 1035.006056] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.016940] env[69328]: DEBUG oslo_vmware.api [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273850, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.078174] env[69328]: DEBUG nova.compute.manager [None req-c1b9677b-6ae4-4ff8-b72c-b37efc8cb7ac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1035.078781] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c1b9677b-6ae4-4ff8-b72c-b37efc8cb7ac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1035.081896] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30a94c8d-a30c-43e5-b038-299252ebad65 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.091780] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1b9677b-6ae4-4ff8-b72c-b37efc8cb7ac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1035.091996] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-37376274-b79e-4213-a621-7a8baf76e20c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.105960] env[69328]: DEBUG oslo_vmware.api [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273847, 'name': Rename_Task, 'duration_secs': 1.1599} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.112657] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1035.113093] env[69328]: DEBUG oslo_vmware.api [None req-c1b9677b-6ae4-4ff8-b72c-b37efc8cb7ac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Waiting for the task: (returnval){ [ 1035.113093] env[69328]: value = "task-3273851" [ 1035.113093] env[69328]: _type = "Task" [ 1035.113093] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.113970] env[69328]: DEBUG nova.network.neutron [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Successfully updated port: 92b25b0d-9caa-4faa-ae5a-417a735a03f7 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1035.116143] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cb4f5022-5db4-4639-abc7-815eb36c0800 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.132431] env[69328]: DEBUG oslo_vmware.api [None req-c1b9677b-6ae4-4ff8-b72c-b37efc8cb7ac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Task: {'id': task-3273851, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.134616] env[69328]: DEBUG oslo_vmware.api [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 1035.134616] env[69328]: value = "task-3273852" [ 1035.134616] env[69328]: _type = "Task" [ 1035.134616] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.145035] env[69328]: DEBUG oslo_vmware.api [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273852, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.198452] env[69328]: DEBUG nova.compute.utils [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1035.204076] env[69328]: DEBUG nova.compute.manager [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1035.204332] env[69328]: DEBUG nova.network.neutron [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1035.267461] env[69328]: DEBUG nova.policy [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a8fbe2a134194d29af48ac8e4986d0cb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd86de4d5055642aa86a29c6768e3db46', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 1035.484217] env[69328]: DEBUG oslo_vmware.api [None req-037d04d2-d1dd-4a35-8a91-516ee31a37ac tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273849, 'name': PowerOnVM_Task} progress is 90%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.516897] env[69328]: DEBUG oslo_vmware.api [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273850, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075152} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.517594] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1035.518883] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dc4abcd-d0a4-4aef-8a84-bf70d3786ee5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.551128] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Reconfiguring VM instance instance-00000061 to attach disk [datastore2] c7321021-15ea-47f4-a8ca-1045f2966394/c7321021-15ea-47f4-a8ca-1045f2966394.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1035.555228] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b97be78c-55bd-4b05-9ff0-af401917c98e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.579016] env[69328]: DEBUG oslo_vmware.api [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 1035.579016] env[69328]: value = "task-3273853" [ 1035.579016] env[69328]: _type = "Task" [ 1035.579016] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.592921] env[69328]: DEBUG oslo_vmware.api [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273853, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.620480] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "refresh_cache-6ad357d9-c35a-4fdb-8dd0-39a0617bf85e" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.620591] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquired lock "refresh_cache-6ad357d9-c35a-4fdb-8dd0-39a0617bf85e" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1035.621239] env[69328]: DEBUG nova.network.neutron [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1035.633944] env[69328]: DEBUG oslo_vmware.api [None req-c1b9677b-6ae4-4ff8-b72c-b37efc8cb7ac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Task: {'id': task-3273851, 'name': PowerOffVM_Task, 'duration_secs': 0.359103} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.634449] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1b9677b-6ae4-4ff8-b72c-b37efc8cb7ac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1035.634706] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c1b9677b-6ae4-4ff8-b72c-b37efc8cb7ac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1035.635069] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-705d91d4-b050-4760-a4eb-9f74b47d3ed1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.662927] env[69328]: DEBUG oslo_vmware.api [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273852, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.670985] env[69328]: DEBUG nova.network.neutron [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Successfully created port: 7db792d2-ce9f-4333-b755-84eb8e83d788 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1035.704959] env[69328]: DEBUG nova.compute.manager [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1035.722765] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1adddbe5-2ba2-40c4-827c-4609003fc5bb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.732304] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59e307f7-f40f-469b-9527-5924147a624f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.737657] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c1b9677b-6ae4-4ff8-b72c-b37efc8cb7ac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1035.737931] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c1b9677b-6ae4-4ff8-b72c-b37efc8cb7ac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1035.738165] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1b9677b-6ae4-4ff8-b72c-b37efc8cb7ac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Deleting the datastore file [datastore2] 55d9ba65-e5c8-446a-a209-a840f30ff02c {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1035.738912] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2aa17249-7be9-48ca-a894-c9fc6f1ebc08 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.772554] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03388f7b-86cc-48d9-b138-dc51b6311cbe {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.775240] env[69328]: DEBUG oslo_vmware.api [None req-c1b9677b-6ae4-4ff8-b72c-b37efc8cb7ac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Waiting for the task: (returnval){ [ 1035.775240] env[69328]: value = "task-3273855" [ 1035.775240] env[69328]: _type = "Task" [ 1035.775240] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.782903] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba34b1ad-83dc-431e-bc9a-ba94458b2072 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.790225] env[69328]: DEBUG oslo_vmware.api [None req-c1b9677b-6ae4-4ff8-b72c-b37efc8cb7ac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Task: {'id': task-3273855, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.801846] env[69328]: DEBUG nova.compute.provider_tree [None req-be70f0a8-60ab-4579-aab1-4d42d40bbc27 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1035.984684] env[69328]: DEBUG oslo_vmware.api [None req-037d04d2-d1dd-4a35-8a91-516ee31a37ac tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273849, 'name': PowerOnVM_Task, 'duration_secs': 0.649821} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.984958] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-037d04d2-d1dd-4a35-8a91-516ee31a37ac tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1035.985189] env[69328]: DEBUG nova.compute.manager [None req-037d04d2-d1dd-4a35-8a91-516ee31a37ac tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1035.985985] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dce1c22a-aa78-4455-938d-de1a5974f99f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.089630] env[69328]: DEBUG oslo_vmware.api [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273853, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.150820] env[69328]: DEBUG oslo_vmware.api [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273852, 'name': PowerOnVM_Task, 'duration_secs': 0.534471} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.151119] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1036.151394] env[69328]: INFO nova.compute.manager [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Took 8.99 seconds to spawn the instance on the hypervisor. [ 1036.151588] env[69328]: DEBUG nova.compute.manager [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1036.152566] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-727cc600-3eec-4baa-9450-803e1205f7a2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.156404] env[69328]: DEBUG nova.network.neutron [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1036.286878] env[69328]: DEBUG oslo_vmware.api [None req-c1b9677b-6ae4-4ff8-b72c-b37efc8cb7ac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Task: {'id': task-3273855, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.300853} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.287169] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1b9677b-6ae4-4ff8-b72c-b37efc8cb7ac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1036.287359] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c1b9677b-6ae4-4ff8-b72c-b37efc8cb7ac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1036.287535] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c1b9677b-6ae4-4ff8-b72c-b37efc8cb7ac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1036.287706] env[69328]: INFO nova.compute.manager [None req-c1b9677b-6ae4-4ff8-b72c-b37efc8cb7ac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Took 1.21 seconds to destroy the instance on the hypervisor. 
[ 1036.287949] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c1b9677b-6ae4-4ff8-b72c-b37efc8cb7ac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1036.288150] env[69328]: DEBUG nova.compute.manager [-] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1036.288318] env[69328]: DEBUG nova.network.neutron [-] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1036.303965] env[69328]: DEBUG nova.scheduler.client.report [None req-be70f0a8-60ab-4579-aab1-4d42d40bbc27 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1036.311623] env[69328]: DEBUG nova.network.neutron [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Updating instance_info_cache with network_info: [{"id": "92b25b0d-9caa-4faa-ae5a-417a735a03f7", "address": "fa:16:3e:14:83:ad", "network": {"id": "5b6ce910-5dc1-49bb-ad4b-fe1a86509fc3", "bridge": "br-int", "label": "tempest-ImagesTestJSON-224847435-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "636412f89c9d488a9cfd6f19ef046efc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1e1e320-ec56-4fcc-b6e9-30aa210d3b36", "external-id": "nsx-vlan-transportzone-447", "segmentation_id": 447, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92b25b0d-9c", "ovs_interfaceid": "92b25b0d-9caa-4faa-ae5a-417a735a03f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.594119] env[69328]: DEBUG oslo_vmware.api [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273853, 'name': ReconfigVM_Task, 'duration_secs': 0.519799} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.594119] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Reconfigured VM instance instance-00000061 to attach disk [datastore2] c7321021-15ea-47f4-a8ca-1045f2966394/c7321021-15ea-47f4-a8ca-1045f2966394.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1036.594119] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a82db9a7-2dcf-41d2-908a-9955c0b765e8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.599699] env[69328]: DEBUG oslo_vmware.api [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 1036.599699] env[69328]: value = "task-3273856" [ 1036.599699] env[69328]: _type = "Task" [ 1036.599699] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.612717] env[69328]: DEBUG oslo_vmware.api [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273856, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.670953] env[69328]: INFO nova.compute.manager [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Took 32.67 seconds to build instance. [ 1036.716810] env[69328]: DEBUG nova.compute.manager [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1036.743625] env[69328]: DEBUG nova.virt.hardware [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1036.743877] env[69328]: DEBUG nova.virt.hardware [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1036.744045] env[69328]: DEBUG nova.virt.hardware [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1036.744242] env[69328]: DEBUG nova.virt.hardware [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1036.744375] env[69328]: DEBUG nova.virt.hardware [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1036.744525] env[69328]: DEBUG nova.virt.hardware [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1036.744737] env[69328]: DEBUG nova.virt.hardware [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1036.744930] env[69328]: DEBUG nova.virt.hardware [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1036.745120] env[69328]: DEBUG nova.virt.hardware [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 
tempest-ServersTestJSON-858906230-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1036.745300] env[69328]: DEBUG nova.virt.hardware [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1036.745481] env[69328]: DEBUG nova.virt.hardware [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1036.746363] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06fc8b97-7997-4a82-8066-c83308dae150 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.756908] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9e121ae-a631-4c45-9b97-45911387fab7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.778594] env[69328]: DEBUG nova.compute.manager [req-2f354645-dd24-4589-af9f-a72fa4b07872 req-2c2002cf-31ff-4bdf-9c36-7a0d3d870bef service nova] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Received event network-changed-92b25b0d-9caa-4faa-ae5a-417a735a03f7 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1036.778820] env[69328]: DEBUG nova.compute.manager [req-2f354645-dd24-4589-af9f-a72fa4b07872 req-2c2002cf-31ff-4bdf-9c36-7a0d3d870bef service nova] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Refreshing instance network info cache due to event network-changed-92b25b0d-9caa-4faa-ae5a-417a735a03f7. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1036.779063] env[69328]: DEBUG oslo_concurrency.lockutils [req-2f354645-dd24-4589-af9f-a72fa4b07872 req-2c2002cf-31ff-4bdf-9c36-7a0d3d870bef service nova] Acquiring lock "refresh_cache-6ad357d9-c35a-4fdb-8dd0-39a0617bf85e" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.811229] env[69328]: DEBUG oslo_concurrency.lockutils [None req-be70f0a8-60ab-4579-aab1-4d42d40bbc27 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.114s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1036.811587] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.680s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1036.811786] env[69328]: DEBUG nova.objects.instance [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Lazy-loading 'pci_requests' on Instance uuid a0952fdf-5570-4112-bc4d-e9f9cee1599c {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1036.814629] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Releasing lock "refresh_cache-6ad357d9-c35a-4fdb-8dd0-39a0617bf85e" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1036.814629] env[69328]: DEBUG nova.compute.manager [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Instance network_info: |[{"id": "92b25b0d-9caa-4faa-ae5a-417a735a03f7", "address": "fa:16:3e:14:83:ad", "network": {"id": "5b6ce910-5dc1-49bb-ad4b-fe1a86509fc3", "bridge": "br-int", "label": "tempest-ImagesTestJSON-224847435-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "636412f89c9d488a9cfd6f19ef046efc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1e1e320-ec56-4fcc-b6e9-30aa210d3b36", "external-id": "nsx-vlan-transportzone-447", "segmentation_id": 447, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92b25b0d-9c", "ovs_interfaceid": "92b25b0d-9caa-4faa-ae5a-417a735a03f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1036.814629] env[69328]: DEBUG 
oslo_concurrency.lockutils [req-2f354645-dd24-4589-af9f-a72fa4b07872 req-2c2002cf-31ff-4bdf-9c36-7a0d3d870bef service nova] Acquired lock "refresh_cache-6ad357d9-c35a-4fdb-8dd0-39a0617bf85e" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1036.814629] env[69328]: DEBUG nova.network.neutron [req-2f354645-dd24-4589-af9f-a72fa4b07872 req-2c2002cf-31ff-4bdf-9c36-7a0d3d870bef service nova] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Refreshing network info cache for port 92b25b0d-9caa-4faa-ae5a-417a735a03f7 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1036.815508] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:14:83:ad', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd1e1e320-ec56-4fcc-b6e9-30aa210d3b36', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '92b25b0d-9caa-4faa-ae5a-417a735a03f7', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1036.827744] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1036.830926] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1036.834876] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d9b75a30-f541-4fe6-8f6a-fc3a1bfec85a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.851046] env[69328]: DEBUG nova.network.neutron [-] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.852910] env[69328]: INFO nova.scheduler.client.report [None req-be70f0a8-60ab-4579-aab1-4d42d40bbc27 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Deleted allocations for instance b21ff3c9-d53a-4065-a271-682c2f1b895d [ 1036.861294] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1036.861294] env[69328]: value = "task-3273857" [ 1036.861294] env[69328]: _type = "Task" [ 1036.861294] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.870673] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273857, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.112226] env[69328]: DEBUG oslo_vmware.api [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273856, 'name': Rename_Task, 'duration_secs': 0.142476} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.112487] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1037.112743] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d570515e-2bd4-4e76-b0de-fb265a51060d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.120946] env[69328]: DEBUG oslo_vmware.api [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 1037.120946] env[69328]: value = "task-3273858" [ 1037.120946] env[69328]: _type = "Task" [ 1037.120946] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.128745] env[69328]: DEBUG oslo_vmware.api [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273858, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.173201] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0bd487cc-4a4c-497e-b3f8-73775954bb06 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "204286d7-c806-48cb-85e9-b2a78571777c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.182s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1037.174878] env[69328]: DEBUG nova.network.neutron [req-2f354645-dd24-4589-af9f-a72fa4b07872 req-2c2002cf-31ff-4bdf-9c36-7a0d3d870bef service nova] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Updated VIF entry in instance network info cache for port 92b25b0d-9caa-4faa-ae5a-417a735a03f7. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1037.175253] env[69328]: DEBUG nova.network.neutron [req-2f354645-dd24-4589-af9f-a72fa4b07872 req-2c2002cf-31ff-4bdf-9c36-7a0d3d870bef service nova] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Updating instance_info_cache with network_info: [{"id": "92b25b0d-9caa-4faa-ae5a-417a735a03f7", "address": "fa:16:3e:14:83:ad", "network": {"id": "5b6ce910-5dc1-49bb-ad4b-fe1a86509fc3", "bridge": "br-int", "label": "tempest-ImagesTestJSON-224847435-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "636412f89c9d488a9cfd6f19ef046efc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1e1e320-ec56-4fcc-b6e9-30aa210d3b36", "external-id": "nsx-vlan-transportzone-447", "segmentation_id": 447, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92b25b0d-9c", "ovs_interfaceid": "92b25b0d-9caa-4faa-ae5a-417a735a03f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1037.317059] env[69328]: DEBUG nova.objects.instance [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Lazy-loading 'numa_topology' on Instance uuid a0952fdf-5570-4112-bc4d-e9f9cee1599c {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1037.342596] env[69328]: DEBUG nova.network.neutron [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Successfully updated port: 7db792d2-ce9f-4333-b755-84eb8e83d788 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1037.355890] env[69328]: INFO nova.compute.manager [-] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Took 1.07 seconds to deallocate network for instance. [ 1037.367321] env[69328]: DEBUG oslo_concurrency.lockutils [None req-be70f0a8-60ab-4579-aab1-4d42d40bbc27 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "b21ff3c9-d53a-4065-a271-682c2f1b895d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.688s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1037.380650] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273857, 'name': CreateVM_Task, 'duration_secs': 0.496744} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.380828] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1037.382089] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.382218] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1037.383120] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1037.383120] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a02081bf-cbf6-4ba9-975a-08664f6cd1aa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.388668] env[69328]: DEBUG oslo_vmware.api [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 1037.388668] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52399e66-3d34-a74a-45fe-ffd887f21f6a" [ 1037.388668] env[69328]: _type = "Task" [ 1037.388668] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.401813] env[69328]: DEBUG oslo_vmware.api [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52399e66-3d34-a74a-45fe-ffd887f21f6a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.631640] env[69328]: DEBUG oslo_vmware.api [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273858, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.678397] env[69328]: DEBUG oslo_concurrency.lockutils [req-2f354645-dd24-4589-af9f-a72fa4b07872 req-2c2002cf-31ff-4bdf-9c36-7a0d3d870bef service nova] Releasing lock "refresh_cache-6ad357d9-c35a-4fdb-8dd0-39a0617bf85e" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1037.819846] env[69328]: INFO nova.compute.claims [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1037.849378] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "refresh_cache-ac0f967d-18c8-45d8-94ca-829a1fe11451" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.849378] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquired lock "refresh_cache-ac0f967d-18c8-45d8-94ca-829a1fe11451" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1037.849378] env[69328]: DEBUG nova.network.neutron [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1037.875897] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c1b9677b-6ae4-4ff8-b72c-b37efc8cb7ac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1037.900979] env[69328]: DEBUG oslo_vmware.api [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52399e66-3d34-a74a-45fe-ffd887f21f6a, 'name': SearchDatastore_Task, 'duration_secs': 0.010933} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.901328] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1037.901748] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1037.902026] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.902186] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1037.902569] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1037.902923] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-72a8e5ec-0f10-41f1-99b3-33ecb72c1cbf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.916273] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1037.916449] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1037.917168] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad0cb8d5-fb97-43c0-9e9d-a397b10f00f5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.923008] env[69328]: DEBUG oslo_vmware.api [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 1037.923008] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5252656a-860d-1829-9caa-c11882242013" [ 1037.923008] env[69328]: _type = "Task" [ 1037.923008] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.931104] env[69328]: DEBUG oslo_vmware.api [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5252656a-860d-1829-9caa-c11882242013, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.133398] env[69328]: DEBUG oslo_vmware.api [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273858, 'name': PowerOnVM_Task, 'duration_secs': 0.565212} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.133684] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1038.133912] env[69328]: INFO nova.compute.manager [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Took 8.49 seconds to spawn the instance on the hypervisor. [ 1038.134118] env[69328]: DEBUG nova.compute.manager [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1038.135152] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-341624e6-e066-497f-8d25-4ef467dfad06 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.385656] env[69328]: DEBUG nova.network.neutron [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1038.436417] env[69328]: DEBUG oslo_vmware.api [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5252656a-860d-1829-9caa-c11882242013, 'name': SearchDatastore_Task, 'duration_secs': 0.03221} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.436417] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6f59327-ffdf-41d3-8bc7-95e7df325d45 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.445027] env[69328]: DEBUG oslo_vmware.api [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 1038.445027] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]524a9171-62c4-46d9-14bc-e3c656b29bba" [ 1038.445027] env[69328]: _type = "Task" [ 1038.445027] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.457067] env[69328]: DEBUG oslo_vmware.api [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]524a9171-62c4-46d9-14bc-e3c656b29bba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.657600] env[69328]: INFO nova.compute.manager [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Took 33.83 seconds to build instance. 
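
Annotation: the "Updating instance_info_cache with network_info" entries in this segment serialize each Neutron VIF as a nested dictionary (port id, MAC, subnets, OVS/NSX binding details). A minimal sketch of how those fields nest, in plain Python over one such entry; vif is a hypothetical variable holding a single element of the logged list, and the key names are copied from the log output.

def summarize_vif(vif):
    """Pull the commonly needed fields out of one logged network_info entry."""
    fixed_ips = [
        ip['address']
        for subnet in vif['network']['subnets']
        for ip in subnet['ips']
        if ip['type'] == 'fixed'
    ]
    return {
        'port_id': vif['id'],                              # Neutron port UUID
        'mac': vif['address'],                             # fa:16:3e:...
        'devname': vif['devname'],                         # "tap" + first 11 chars of the port UUID
        'vif_type': vif['type'],                           # "ovs" in these entries
        'segmentation_id': vif['details'].get('segmentation_id'),
        'fixed_ips': fixed_ips,                            # e.g. ['192.168.128.7']
    }

For the entry logged just below (port 7db792d2-ce9f-4333-b755-84eb8e83d788), this would yield devname tap7db792d2-ce and fixed IP 192.168.128.14.
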
[ 1038.702198] env[69328]: DEBUG nova.network.neutron [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Updating instance_info_cache with network_info: [{"id": "7db792d2-ce9f-4333-b755-84eb8e83d788", "address": "fa:16:3e:a2:f7:e9", "network": {"id": "7f5dcab4-3cec-42a1-b589-88cb373af645", "bridge": "br-int", "label": "tempest-ServersTestJSON-1833802368-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d86de4d5055642aa86a29c6768e3db46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b94712a6-b777-47dd-bc06-f9acfce2d936", "external-id": "nsx-vlan-transportzone-494", "segmentation_id": 494, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7db792d2-ce", "ovs_interfaceid": "7db792d2-ce9f-4333-b755-84eb8e83d788", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1038.716066] env[69328]: DEBUG nova.compute.manager [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Stashing vm_state: active {{(pid=69328) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1038.790671] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e71999aa-679a-4341-b151-da7d58c2e36a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.804102] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43240ac0-e925-48f8-a2dd-39adc4aa89c2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.845440] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d3925c9-51e4-4b18-b17a-fe326821110d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.853514] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec7565b2-2f7d-44c4-adf4-2636d6777c6a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.866970] env[69328]: DEBUG nova.compute.provider_tree [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1038.924314] env[69328]: DEBUG nova.compute.manager [req-c459e92c-eb67-49ac-b755-828d13331580 req-2066e94a-0d2d-4793-9e88-67393c0558a7 service nova] [instance: 
55d9ba65-e5c8-446a-a209-a840f30ff02c] Received event network-vif-deleted-7748ad51-059a-4dd5-b929-13b3fbac9d5c {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1038.924314] env[69328]: DEBUG nova.compute.manager [req-c459e92c-eb67-49ac-b755-828d13331580 req-2066e94a-0d2d-4793-9e88-67393c0558a7 service nova] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Received event network-vif-plugged-7db792d2-ce9f-4333-b755-84eb8e83d788 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1038.924314] env[69328]: DEBUG oslo_concurrency.lockutils [req-c459e92c-eb67-49ac-b755-828d13331580 req-2066e94a-0d2d-4793-9e88-67393c0558a7 service nova] Acquiring lock "ac0f967d-18c8-45d8-94ca-829a1fe11451-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1038.924314] env[69328]: DEBUG oslo_concurrency.lockutils [req-c459e92c-eb67-49ac-b755-828d13331580 req-2066e94a-0d2d-4793-9e88-67393c0558a7 service nova] Lock "ac0f967d-18c8-45d8-94ca-829a1fe11451-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1038.924314] env[69328]: DEBUG oslo_concurrency.lockutils [req-c459e92c-eb67-49ac-b755-828d13331580 req-2066e94a-0d2d-4793-9e88-67393c0558a7 service nova] Lock "ac0f967d-18c8-45d8-94ca-829a1fe11451-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1038.924314] env[69328]: DEBUG nova.compute.manager [req-c459e92c-eb67-49ac-b755-828d13331580 req-2066e94a-0d2d-4793-9e88-67393c0558a7 service nova] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] No waiting events found dispatching network-vif-plugged-7db792d2-ce9f-4333-b755-84eb8e83d788 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1038.924314] env[69328]: WARNING nova.compute.manager [req-c459e92c-eb67-49ac-b755-828d13331580 req-2066e94a-0d2d-4793-9e88-67393c0558a7 service nova] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Received unexpected event network-vif-plugged-7db792d2-ce9f-4333-b755-84eb8e83d788 for instance with vm_state building and task_state spawning. [ 1038.924314] env[69328]: DEBUG nova.compute.manager [req-c459e92c-eb67-49ac-b755-828d13331580 req-2066e94a-0d2d-4793-9e88-67393c0558a7 service nova] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Received event network-changed-7db792d2-ce9f-4333-b755-84eb8e83d788 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1038.924314] env[69328]: DEBUG nova.compute.manager [req-c459e92c-eb67-49ac-b755-828d13331580 req-2066e94a-0d2d-4793-9e88-67393c0558a7 service nova] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Refreshing instance network info cache due to event network-changed-7db792d2-ce9f-4333-b755-84eb8e83d788. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1038.924314] env[69328]: DEBUG oslo_concurrency.lockutils [req-c459e92c-eb67-49ac-b755-828d13331580 req-2066e94a-0d2d-4793-9e88-67393c0558a7 service nova] Acquiring lock "refresh_cache-ac0f967d-18c8-45d8-94ca-829a1fe11451" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1038.954698] env[69328]: DEBUG oslo_vmware.api [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]524a9171-62c4-46d9-14bc-e3c656b29bba, 'name': SearchDatastore_Task, 'duration_secs': 0.026813} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.956822] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1038.956822] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e/6ad357d9-c35a-4fdb-8dd0-39a0617bf85e.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1038.956822] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-75b3af03-cb36-4298-80ca-98a59fa99997 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.967525] env[69328]: DEBUG oslo_vmware.api [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 1038.967525] env[69328]: value = "task-3273859" [ 1038.967525] env[69328]: _type = "Task" [ 1038.967525] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.973155] env[69328]: DEBUG oslo_vmware.api [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273859, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.158655] env[69328]: DEBUG oslo_concurrency.lockutils [None req-09e03c96-f1da-4287-9d58-059b5ccd7cb5 tempest-ServersAdminTestJSON-531277908 tempest-ServersAdminTestJSON-531277908-project-admin] Acquiring lock "refresh_cache-c7321021-15ea-47f4-a8ca-1045f2966394" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.160093] env[69328]: DEBUG oslo_concurrency.lockutils [None req-09e03c96-f1da-4287-9d58-059b5ccd7cb5 tempest-ServersAdminTestJSON-531277908 tempest-ServersAdminTestJSON-531277908-project-admin] Acquired lock "refresh_cache-c7321021-15ea-47f4-a8ca-1045f2966394" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1039.160093] env[69328]: DEBUG nova.network.neutron [None req-09e03c96-f1da-4287-9d58-059b5ccd7cb5 tempest-ServersAdminTestJSON-531277908 tempest-ServersAdminTestJSON-531277908-project-admin] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1039.163956] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a58d85cb-26bf-48fc-84e9-3b2c5032b8cf tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "c7321021-15ea-47f4-a8ca-1045f2966394" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.351s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1039.207093] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Releasing lock "refresh_cache-ac0f967d-18c8-45d8-94ca-829a1fe11451" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1039.207402] env[69328]: DEBUG nova.compute.manager [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Instance network_info: |[{"id": "7db792d2-ce9f-4333-b755-84eb8e83d788", "address": "fa:16:3e:a2:f7:e9", "network": {"id": "7f5dcab4-3cec-42a1-b589-88cb373af645", "bridge": "br-int", "label": "tempest-ServersTestJSON-1833802368-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d86de4d5055642aa86a29c6768e3db46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b94712a6-b777-47dd-bc06-f9acfce2d936", "external-id": "nsx-vlan-transportzone-494", "segmentation_id": 494, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7db792d2-ce", "ovs_interfaceid": "7db792d2-ce9f-4333-b755-84eb8e83d788", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1039.207708] env[69328]: DEBUG oslo_concurrency.lockutils [req-c459e92c-eb67-49ac-b755-828d13331580 req-2066e94a-0d2d-4793-9e88-67393c0558a7 service nova] Acquired lock "refresh_cache-ac0f967d-18c8-45d8-94ca-829a1fe11451" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1039.207909] env[69328]: DEBUG nova.network.neutron [req-c459e92c-eb67-49ac-b755-828d13331580 req-2066e94a-0d2d-4793-9e88-67393c0558a7 service nova] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Refreshing network info cache for port 7db792d2-ce9f-4333-b755-84eb8e83d788 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1039.209129] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a2:f7:e9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b94712a6-b777-47dd-bc06-f9acfce2d936', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7db792d2-ce9f-4333-b755-84eb8e83d788', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1039.218112] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1039.218112] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1039.218112] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1f7486e7-dfe8-488b-8684-987f2bee6ce3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.245350] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1039.245350] env[69328]: value = "task-3273860" [ 1039.245350] env[69328]: _type = "Task" [ 1039.245350] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.254015] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273860, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.255809] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1039.371340] env[69328]: DEBUG nova.scheduler.client.report [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1039.479690] env[69328]: DEBUG oslo_vmware.api [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273859, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.760413] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273860, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.844082] env[69328]: DEBUG oslo_concurrency.lockutils [None req-427d6a49-3d5a-4df6-8145-722a95c1f0b6 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Acquiring lock "19f537b7-90fc-4832-b137-e042e00a508b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1039.844548] env[69328]: DEBUG oslo_concurrency.lockutils [None req-427d6a49-3d5a-4df6-8145-722a95c1f0b6 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Lock "19f537b7-90fc-4832-b137-e042e00a508b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1039.844797] env[69328]: DEBUG oslo_concurrency.lockutils [None req-427d6a49-3d5a-4df6-8145-722a95c1f0b6 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Acquiring lock "19f537b7-90fc-4832-b137-e042e00a508b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1039.845066] env[69328]: DEBUG oslo_concurrency.lockutils [None req-427d6a49-3d5a-4df6-8145-722a95c1f0b6 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Lock "19f537b7-90fc-4832-b137-e042e00a508b-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1039.845317] env[69328]: DEBUG oslo_concurrency.lockutils [None req-427d6a49-3d5a-4df6-8145-722a95c1f0b6 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Lock "19f537b7-90fc-4832-b137-e042e00a508b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1039.847605] env[69328]: INFO nova.compute.manager [None req-427d6a49-3d5a-4df6-8145-722a95c1f0b6 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Terminating instance [ 1039.876571] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.065s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1039.880812] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.648s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1039.884809] env[69328]: INFO nova.compute.claims [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1039.950650] env[69328]: INFO nova.network.neutron [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Updating port c74af0b7-ebfb-4563-9208-a18235899a6c with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1039.977936] env[69328]: DEBUG oslo_vmware.api [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273859, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.724204} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.979047] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e/6ad357d9-c35a-4fdb-8dd0-39a0617bf85e.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1039.979047] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1039.979047] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0e84fb96-4c34-4d45-95af-c310ecfbc394 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.989322] env[69328]: DEBUG oslo_vmware.api [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 1039.989322] env[69328]: value = "task-3273861" [ 1039.989322] env[69328]: _type = "Task" [ 1039.989322] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.001242] env[69328]: DEBUG oslo_vmware.api [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273861, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.003040] env[69328]: DEBUG nova.network.neutron [req-c459e92c-eb67-49ac-b755-828d13331580 req-2066e94a-0d2d-4793-9e88-67393c0558a7 service nova] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Updated VIF entry in instance network info cache for port 7db792d2-ce9f-4333-b755-84eb8e83d788. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1040.003040] env[69328]: DEBUG nova.network.neutron [req-c459e92c-eb67-49ac-b755-828d13331580 req-2066e94a-0d2d-4793-9e88-67393c0558a7 service nova] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Updating instance_info_cache with network_info: [{"id": "7db792d2-ce9f-4333-b755-84eb8e83d788", "address": "fa:16:3e:a2:f7:e9", "network": {"id": "7f5dcab4-3cec-42a1-b589-88cb373af645", "bridge": "br-int", "label": "tempest-ServersTestJSON-1833802368-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d86de4d5055642aa86a29c6768e3db46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b94712a6-b777-47dd-bc06-f9acfce2d936", "external-id": "nsx-vlan-transportzone-494", "segmentation_id": 494, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7db792d2-ce", "ovs_interfaceid": "7db792d2-ce9f-4333-b755-84eb8e83d788", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1040.108133] env[69328]: DEBUG nova.network.neutron [None req-09e03c96-f1da-4287-9d58-059b5ccd7cb5 tempest-ServersAdminTestJSON-531277908 tempest-ServersAdminTestJSON-531277908-project-admin] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Updating instance_info_cache with network_info: [{"id": "b90c50eb-decb-4850-8c7e-af0b3b67eaf0", "address": "fa:16:3e:92:1a:33", "network": {"id": "77f17547-8c62-4a31-9840-8d2cbb1bfbab", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-163692077-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bad0df17bba4bc996fe5cf1faf23fad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb90c50eb-de", "ovs_interfaceid": "b90c50eb-decb-4850-8c7e-af0b3b67eaf0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1040.258145] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273860, 'name': CreateVM_Task, 'duration_secs': 0.558788} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.258798] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1040.260034] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1040.261576] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1040.261576] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1040.261576] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7612a29b-2910-4ea0-8d28-3dea2e60d093 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.268471] env[69328]: DEBUG oslo_vmware.api [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1040.268471] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]523338cf-3b22-14dd-8b43-96d3edba6c07" [ 1040.268471] env[69328]: _type = "Task" [ 1040.268471] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.279654] env[69328]: DEBUG oslo_vmware.api [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]523338cf-3b22-14dd-8b43-96d3edba6c07, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.354018] env[69328]: DEBUG nova.compute.manager [None req-427d6a49-3d5a-4df6-8145-722a95c1f0b6 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1040.354018] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-427d6a49-3d5a-4df6-8145-722a95c1f0b6 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1040.354018] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23d78d36-4f4a-4d04-81ab-05bf08529982 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.361758] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-427d6a49-3d5a-4df6-8145-722a95c1f0b6 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1040.361758] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-840227da-cf5f-41e1-842f-2d35a5c41efc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.368878] env[69328]: DEBUG oslo_vmware.api [None req-427d6a49-3d5a-4df6-8145-722a95c1f0b6 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Waiting for the task: (returnval){ [ 1040.368878] env[69328]: value = "task-3273862" [ 1040.368878] env[69328]: _type = "Task" [ 1040.368878] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.379233] env[69328]: DEBUG oslo_vmware.api [None req-427d6a49-3d5a-4df6-8145-722a95c1f0b6 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273862, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.499771] env[69328]: DEBUG oslo_vmware.api [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273861, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071397} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.500767] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1040.502357] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc37ed05-320d-49f6-89e8-97f2c8548068 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.505696] env[69328]: DEBUG oslo_concurrency.lockutils [req-c459e92c-eb67-49ac-b755-828d13331580 req-2066e94a-0d2d-4793-9e88-67393c0558a7 service nova] Releasing lock "refresh_cache-ac0f967d-18c8-45d8-94ca-829a1fe11451" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1040.529817] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e/6ad357d9-c35a-4fdb-8dd0-39a0617bf85e.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1040.532019] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c4733346-4177-409e-8516-e4e152e3fa3f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.554614] env[69328]: DEBUG oslo_vmware.api [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 1040.554614] env[69328]: value = "task-3273863" [ 1040.554614] env[69328]: _type = "Task" [ 1040.554614] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.562913] env[69328]: DEBUG oslo_vmware.api [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273863, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.610994] env[69328]: DEBUG oslo_concurrency.lockutils [None req-09e03c96-f1da-4287-9d58-059b5ccd7cb5 tempest-ServersAdminTestJSON-531277908 tempest-ServersAdminTestJSON-531277908-project-admin] Releasing lock "refresh_cache-c7321021-15ea-47f4-a8ca-1045f2966394" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1040.611372] env[69328]: DEBUG nova.compute.manager [None req-09e03c96-f1da-4287-9d58-059b5ccd7cb5 tempest-ServersAdminTestJSON-531277908 tempest-ServersAdminTestJSON-531277908-project-admin] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Inject network info {{(pid=69328) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 1040.611751] env[69328]: DEBUG nova.compute.manager [None req-09e03c96-f1da-4287-9d58-059b5ccd7cb5 tempest-ServersAdminTestJSON-531277908 tempest-ServersAdminTestJSON-531277908-project-admin] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] network_info to inject: |[{"id": "b90c50eb-decb-4850-8c7e-af0b3b67eaf0", "address": "fa:16:3e:92:1a:33", "network": {"id": "77f17547-8c62-4a31-9840-8d2cbb1bfbab", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-163692077-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bad0df17bba4bc996fe5cf1faf23fad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb90c50eb-de", "ovs_interfaceid": "b90c50eb-decb-4850-8c7e-af0b3b67eaf0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 1040.619029] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-09e03c96-f1da-4287-9d58-059b5ccd7cb5 tempest-ServersAdminTestJSON-531277908 tempest-ServersAdminTestJSON-531277908-project-admin] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Reconfiguring VM instance to set the machine id {{(pid=69328) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1040.619029] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1057ab46-8bc6-426e-8803-d3d3dee90600 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.636206] env[69328]: DEBUG oslo_vmware.api [None req-09e03c96-f1da-4287-9d58-059b5ccd7cb5 tempest-ServersAdminTestJSON-531277908 tempest-ServersAdminTestJSON-531277908-project-admin] Waiting for the task: (returnval){ [ 1040.636206] env[69328]: value = "task-3273864" [ 1040.636206] env[69328]: _type = "Task" [ 1040.636206] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.778886] env[69328]: DEBUG oslo_vmware.api [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]523338cf-3b22-14dd-8b43-96d3edba6c07, 'name': SearchDatastore_Task, 'duration_secs': 0.040536} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.779229] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1040.779455] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1040.779701] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1040.779845] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1040.780035] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1040.780373] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dd6d09e1-2453-4201-92e9-0dd9f7c18b52 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.788906] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1040.789122] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1040.789931] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16c0f444-ac02-4a8d-87b0-5946094f2d00 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.795461] env[69328]: DEBUG oslo_vmware.api [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1040.795461] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52dcdef7-d85f-6382-b6dd-bef740c9e923" [ 1040.795461] env[69328]: _type = "Task" [ 1040.795461] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.803283] env[69328]: DEBUG oslo_vmware.api [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52dcdef7-d85f-6382-b6dd-bef740c9e923, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.879280] env[69328]: DEBUG oslo_vmware.api [None req-427d6a49-3d5a-4df6-8145-722a95c1f0b6 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273862, 'name': PowerOffVM_Task, 'duration_secs': 0.26677} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.879578] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-427d6a49-3d5a-4df6-8145-722a95c1f0b6 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1040.879716] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-427d6a49-3d5a-4df6-8145-722a95c1f0b6 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1040.879965] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9522686a-87d4-44b3-a075-04e2e5c7fa9f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.969673] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-427d6a49-3d5a-4df6-8145-722a95c1f0b6 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1040.969801] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-427d6a49-3d5a-4df6-8145-722a95c1f0b6 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1040.971682] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-427d6a49-3d5a-4df6-8145-722a95c1f0b6 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Deleting the datastore file [datastore2] 19f537b7-90fc-4832-b137-e042e00a508b {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1040.971682] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-74256125-9780-41ac-85ea-4dd30ab633d9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.978578] env[69328]: DEBUG oslo_vmware.api [None req-427d6a49-3d5a-4df6-8145-722a95c1f0b6 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Waiting for the task: (returnval){ [ 1040.978578] env[69328]: value = "task-3273866" [ 1040.978578] env[69328]: _type = "Task" [ 1040.978578] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.990725] env[69328]: DEBUG oslo_vmware.api [None req-427d6a49-3d5a-4df6-8145-722a95c1f0b6 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273866, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.068447] env[69328]: DEBUG oslo_vmware.api [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273863, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.153850] env[69328]: DEBUG oslo_vmware.api [None req-09e03c96-f1da-4287-9d58-059b5ccd7cb5 tempest-ServersAdminTestJSON-531277908 tempest-ServersAdminTestJSON-531277908-project-admin] Task: {'id': task-3273864, 'name': ReconfigVM_Task, 'duration_secs': 0.192325} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.154333] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-09e03c96-f1da-4287-9d58-059b5ccd7cb5 tempest-ServersAdminTestJSON-531277908 tempest-ServersAdminTestJSON-531277908-project-admin] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Reconfigured VM instance to set the machine id {{(pid=69328) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1041.307159] env[69328]: DEBUG oslo_vmware.api [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52dcdef7-d85f-6382-b6dd-bef740c9e923, 'name': SearchDatastore_Task, 'duration_secs': 0.020819} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.314950] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed6540b7-91c2-4051-b322-102bcbefd1f6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.321408] env[69328]: DEBUG oslo_vmware.api [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1041.321408] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]529c4361-c7d4-2c5b-ea69-bc073575fccc" [ 1041.321408] env[69328]: _type = "Task" [ 1041.321408] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.334818] env[69328]: DEBUG oslo_vmware.api [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]529c4361-c7d4-2c5b-ea69-bc073575fccc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.431988] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b4dfe5c-3217-4b09-98b0-3ec1bd4d246d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.440350] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ffc2eb6-744c-4b51-9565-6f62529e45a1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.474537] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0df3bf3-362d-4913-a263-4b01de08702c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.487519] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-742dd082-b65b-4f57-b80a-892b5e14e510 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.496326] env[69328]: DEBUG oslo_vmware.api [None req-427d6a49-3d5a-4df6-8145-722a95c1f0b6 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273866, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.322463} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.496326] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-427d6a49-3d5a-4df6-8145-722a95c1f0b6 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1041.496326] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-427d6a49-3d5a-4df6-8145-722a95c1f0b6 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1041.496326] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-427d6a49-3d5a-4df6-8145-722a95c1f0b6 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1041.496472] env[69328]: INFO nova.compute.manager [None req-427d6a49-3d5a-4df6-8145-722a95c1f0b6 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1041.496705] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-427d6a49-3d5a-4df6-8145-722a95c1f0b6 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1041.497302] env[69328]: DEBUG nova.compute.manager [-] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1041.497395] env[69328]: DEBUG nova.network.neutron [-] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1041.508599] env[69328]: DEBUG nova.compute.provider_tree [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1041.567417] env[69328]: DEBUG oslo_vmware.api [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273863, 'name': ReconfigVM_Task, 'duration_secs': 0.819893} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.567708] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Reconfigured VM instance instance-00000062 to attach disk [datastore2] 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e/6ad357d9-c35a-4fdb-8dd0-39a0617bf85e.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1041.568361] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f1a908c9-9847-4117-b732-f4a47b424156 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.574440] env[69328]: DEBUG oslo_vmware.api [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 1041.574440] env[69328]: value = "task-3273867" [ 1041.574440] env[69328]: _type = "Task" [ 1041.574440] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.588984] env[69328]: DEBUG oslo_vmware.api [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273867, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.689697] env[69328]: DEBUG nova.compute.manager [req-ee196b33-502e-48cf-af7d-a5119e05587e req-7cb76c23-79c3-42e0-9114-16b82e08c693 service nova] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Received event network-vif-plugged-c74af0b7-ebfb-4563-9208-a18235899a6c {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1041.689967] env[69328]: DEBUG oslo_concurrency.lockutils [req-ee196b33-502e-48cf-af7d-a5119e05587e req-7cb76c23-79c3-42e0-9114-16b82e08c693 service nova] Acquiring lock "a0952fdf-5570-4112-bc4d-e9f9cee1599c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1041.690139] env[69328]: DEBUG oslo_concurrency.lockutils [req-ee196b33-502e-48cf-af7d-a5119e05587e req-7cb76c23-79c3-42e0-9114-16b82e08c693 service nova] Lock "a0952fdf-5570-4112-bc4d-e9f9cee1599c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1041.690322] env[69328]: DEBUG oslo_concurrency.lockutils [req-ee196b33-502e-48cf-af7d-a5119e05587e req-7cb76c23-79c3-42e0-9114-16b82e08c693 service nova] Lock "a0952fdf-5570-4112-bc4d-e9f9cee1599c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1041.690492] env[69328]: DEBUG nova.compute.manager [req-ee196b33-502e-48cf-af7d-a5119e05587e req-7cb76c23-79c3-42e0-9114-16b82e08c693 service nova] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] No waiting events found dispatching network-vif-plugged-c74af0b7-ebfb-4563-9208-a18235899a6c {{(pid=69328) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1041.690655] env[69328]: WARNING nova.compute.manager [req-ee196b33-502e-48cf-af7d-a5119e05587e req-7cb76c23-79c3-42e0-9114-16b82e08c693 service nova] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Received unexpected event network-vif-plugged-c74af0b7-ebfb-4563-9208-a18235899a6c for instance with vm_state shelved_offloaded and task_state spawning. [ 1041.734663] env[69328]: INFO nova.compute.manager [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Rebuilding instance [ 1041.813478] env[69328]: DEBUG nova.compute.manager [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1041.814509] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9321b03-4f42-4a7a-8fa2-a8357195065f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.832981] env[69328]: DEBUG oslo_vmware.api [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]529c4361-c7d4-2c5b-ea69-bc073575fccc, 'name': SearchDatastore_Task, 'duration_secs': 0.020344} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.833445] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1041.833754] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] ac0f967d-18c8-45d8-94ca-829a1fe11451/ac0f967d-18c8-45d8-94ca-829a1fe11451.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1041.834073] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d63dfa62-06bf-4658-959e-e282d4d56013 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.843507] env[69328]: DEBUG oslo_vmware.api [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1041.843507] env[69328]: value = "task-3273868" [ 1041.843507] env[69328]: _type = "Task" [ 1041.843507] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.853861] env[69328]: DEBUG oslo_vmware.api [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273868, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.012080] env[69328]: DEBUG nova.scheduler.client.report [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1042.091104] env[69328]: DEBUG oslo_vmware.api [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273867, 'name': Rename_Task, 'duration_secs': 0.224809} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.091104] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1042.091104] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-092a1adf-6e70-49b8-82cf-a5f921be057a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.099617] env[69328]: DEBUG oslo_vmware.api [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 1042.099617] env[69328]: value = "task-3273869" [ 1042.099617] env[69328]: _type = "Task" [ 1042.099617] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.107892] env[69328]: DEBUG oslo_vmware.api [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273869, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.321587] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Acquiring lock "refresh_cache-a0952fdf-5570-4112-bc4d-e9f9cee1599c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1042.321762] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Acquired lock "refresh_cache-a0952fdf-5570-4112-bc4d-e9f9cee1599c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1042.321989] env[69328]: DEBUG nova.network.neutron [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1042.346565] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9163baf9-03d1-4a5a-9933-2d2e53d64012 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Acquiring lock "65fccb3f-5e0e-4140-be0a-5ba20f494d50" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1042.346982] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9163baf9-03d1-4a5a-9933-2d2e53d64012 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Lock "65fccb3f-5e0e-4140-be0a-5ba20f494d50" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.002s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1042.347228] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9163baf9-03d1-4a5a-9933-2d2e53d64012 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Acquiring lock "65fccb3f-5e0e-4140-be0a-5ba20f494d50-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1042.349512] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9163baf9-03d1-4a5a-9933-2d2e53d64012 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Lock "65fccb3f-5e0e-4140-be0a-5ba20f494d50-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1042.350716] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9163baf9-03d1-4a5a-9933-2d2e53d64012 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Lock "65fccb3f-5e0e-4140-be0a-5ba20f494d50-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.003s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1042.355899] env[69328]: INFO nova.compute.manager [None 
req-9163baf9-03d1-4a5a-9933-2d2e53d64012 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Terminating instance [ 1042.373574] env[69328]: DEBUG oslo_vmware.api [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273868, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.517886] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.637s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1042.520120] env[69328]: DEBUG nova.compute.manager [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1042.522780] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0f547f2f-a43b-462c-881b-bbc0919972d2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.400s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1042.523456] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0f547f2f-a43b-462c-881b-bbc0919972d2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1042.526889] env[69328]: DEBUG oslo_concurrency.lockutils [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.874s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1042.528982] env[69328]: INFO nova.compute.claims [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1042.572273] env[69328]: INFO nova.scheduler.client.report [None req-0f547f2f-a43b-462c-881b-bbc0919972d2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Deleted allocations for instance 96f604a9-e42c-4aa8-b5b5-edcb34901d94 [ 1042.614056] env[69328]: DEBUG oslo_vmware.api [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273869, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.829663] env[69328]: DEBUG nova.network.neutron [-] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1042.831668] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1042.832345] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a5060917-4299-42fd-bd7a-d4b86d087ccf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.841567] env[69328]: DEBUG oslo_vmware.api [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 1042.841567] env[69328]: value = "task-3273870" [ 1042.841567] env[69328]: _type = "Task" [ 1042.841567] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.852970] env[69328]: DEBUG oslo_vmware.api [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273870, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.864535] env[69328]: DEBUG oslo_vmware.api [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273868, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.776368} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.867106] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] ac0f967d-18c8-45d8-94ca-829a1fe11451/ac0f967d-18c8-45d8-94ca-829a1fe11451.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1042.867359] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1042.867987] env[69328]: DEBUG nova.compute.manager [None req-9163baf9-03d1-4a5a-9933-2d2e53d64012 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1042.868204] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9163baf9-03d1-4a5a-9933-2d2e53d64012 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1042.868738] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-718f766f-83d8-48a1-a9c5-58be9df88030 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.871250] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a89bc621-3336-4915-85bd-44e125c5dc72 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.879329] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-9163baf9-03d1-4a5a-9933-2d2e53d64012 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1042.881158] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d10c015c-764f-4a26-a35e-c7f897fcc7be {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.883223] env[69328]: DEBUG oslo_vmware.api [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1042.883223] env[69328]: value = "task-3273871" [ 1042.883223] env[69328]: _type = "Task" [ 1042.883223] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.888747] env[69328]: DEBUG oslo_vmware.api [None req-9163baf9-03d1-4a5a-9933-2d2e53d64012 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Waiting for the task: (returnval){ [ 1042.888747] env[69328]: value = "task-3273872" [ 1042.888747] env[69328]: _type = "Task" [ 1042.888747] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.895945] env[69328]: DEBUG oslo_vmware.api [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273871, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.900970] env[69328]: DEBUG oslo_vmware.api [None req-9163baf9-03d1-4a5a-9933-2d2e53d64012 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Task: {'id': task-3273872, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.034776] env[69328]: DEBUG nova.compute.utils [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1043.041047] env[69328]: DEBUG nova.compute.manager [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1043.041047] env[69328]: DEBUG nova.network.neutron [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1043.087248] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0f547f2f-a43b-462c-881b-bbc0919972d2 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "96f604a9-e42c-4aa8-b5b5-edcb34901d94" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.363s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1043.106635] env[69328]: DEBUG nova.policy [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1685bb9a09d84a7a92306c64f0e5895e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '75d5853e3c724d02bacfa75173e38ab3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 1043.115495] env[69328]: DEBUG oslo_vmware.api [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273869, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.141465] env[69328]: DEBUG nova.network.neutron [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Updating instance_info_cache with network_info: [{"id": "c74af0b7-ebfb-4563-9208-a18235899a6c", "address": "fa:16:3e:35:bb:fc", "network": {"id": "cc75e08f-f0f3-4b52-9b40-0de73f044554", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1326858830-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1393040bf5304571ae4b66d0a4ee7b6e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc74af0b7-eb", "ovs_interfaceid": "c74af0b7-ebfb-4563-9208-a18235899a6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1043.332539] env[69328]: INFO nova.compute.manager [-] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Took 1.83 seconds to deallocate network for instance. [ 1043.360139] env[69328]: DEBUG oslo_vmware.api [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273870, 'name': PowerOffVM_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.397870] env[69328]: DEBUG oslo_vmware.api [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273871, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.10214} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.398711] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1043.399473] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57211b27-4f4c-45d7-b854-d01654b5ada6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.406442] env[69328]: DEBUG oslo_vmware.api [None req-9163baf9-03d1-4a5a-9933-2d2e53d64012 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Task: {'id': task-3273872, 'name': PowerOffVM_Task, 'duration_secs': 0.476325} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.407099] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-9163baf9-03d1-4a5a-9933-2d2e53d64012 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1043.407476] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9163baf9-03d1-4a5a-9933-2d2e53d64012 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1043.407747] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-badb3c5a-3d8e-4956-97d6-afdb4528b486 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.430299] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Reconfiguring VM instance instance-00000063 to attach disk [datastore2] ac0f967d-18c8-45d8-94ca-829a1fe11451/ac0f967d-18c8-45d8-94ca-829a1fe11451.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1043.431040] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-48b96f83-f0c2-4370-9f81-9d5e1369e416 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.450157] env[69328]: DEBUG oslo_vmware.api [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1043.450157] env[69328]: value = "task-3273874" [ 1043.450157] env[69328]: _type = "Task" [ 1043.450157] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.458721] env[69328]: DEBUG oslo_vmware.api [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273874, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.477673] env[69328]: DEBUG nova.network.neutron [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Successfully created port: 14cfba2e-1458-4c09-a1bb-825784ca30af {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1043.501982] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9163baf9-03d1-4a5a-9933-2d2e53d64012 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1043.502241] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9163baf9-03d1-4a5a-9933-2d2e53d64012 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1043.502534] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-9163baf9-03d1-4a5a-9933-2d2e53d64012 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Deleting the datastore file [datastore1] 65fccb3f-5e0e-4140-be0a-5ba20f494d50 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1043.502873] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e0654db7-0506-45cb-8faa-5184c5039139 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.510059] env[69328]: DEBUG oslo_vmware.api [None req-9163baf9-03d1-4a5a-9933-2d2e53d64012 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Waiting for the task: (returnval){ [ 1043.510059] env[69328]: value = "task-3273875" [ 1043.510059] env[69328]: _type = "Task" [ 1043.510059] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.520234] env[69328]: DEBUG oslo_vmware.api [None req-9163baf9-03d1-4a5a-9933-2d2e53d64012 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Task: {'id': task-3273875, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.541055] env[69328]: DEBUG nova.compute.manager [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1043.617991] env[69328]: DEBUG oslo_vmware.api [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273869, 'name': PowerOnVM_Task, 'duration_secs': 1.120807} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.618550] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1043.618827] env[69328]: INFO nova.compute.manager [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Took 9.50 seconds to spawn the instance on the hypervisor. [ 1043.619071] env[69328]: DEBUG nova.compute.manager [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1043.620778] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6f6744b-5c92-4786-b0ca-8770dc360849 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.644952] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Releasing lock "refresh_cache-a0952fdf-5570-4112-bc4d-e9f9cee1599c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1043.684706] env[69328]: DEBUG nova.virt.hardware [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='a44eb8f95d49060dd1a1f9caa69fe7ab',container_format='bare',created_at=2025-04-03T17:42:17Z,direct_url=,disk_format='vmdk',id=8868d8b6-e8a6-4c40-9bca-fb6ec2c24443,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-1171557898-shelved',owner='1393040bf5304571ae4b66d0a4ee7b6e',properties=ImageMetaProps,protected=,size=31663616,status='active',tags=,updated_at=2025-04-03T17:42:35Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1043.685742] env[69328]: DEBUG nova.virt.hardware [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1043.685742] env[69328]: DEBUG nova.virt.hardware [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1043.685742] env[69328]: DEBUG nova.virt.hardware [None req-d8312b11-704f-44de-8694-e9dcdc949eee 
tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1043.685742] env[69328]: DEBUG nova.virt.hardware [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1043.685742] env[69328]: DEBUG nova.virt.hardware [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1043.686118] env[69328]: DEBUG nova.virt.hardware [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1043.686289] env[69328]: DEBUG nova.virt.hardware [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1043.686432] env[69328]: DEBUG nova.virt.hardware [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1043.686622] env[69328]: DEBUG nova.virt.hardware [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1043.686824] env[69328]: DEBUG nova.virt.hardware [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1043.688144] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7755c1e6-1aa8-46d9-a352-4b59140e5b38 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.701164] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f80b7df0-5a41-4923-950a-f3a95462624d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.719268] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:35:bb:fc', 
'network_ref': {'type': 'OpaqueNetwork', 'network-id': '10b81051-1eb1-406b-888c-4548c470c77e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c74af0b7-ebfb-4563-9208-a18235899a6c', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1043.726615] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1043.729831] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1043.730327] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5e26d517-5c79-4b2b-ad37-9727d7a4ef31 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.754923] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1043.754923] env[69328]: value = "task-3273876" [ 1043.754923] env[69328]: _type = "Task" [ 1043.754923] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.764035] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273876, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.836546] env[69328]: DEBUG nova.compute.manager [req-3a29bd11-03c0-40e9-b513-21e16a06e080 req-c88bfedf-43e9-437b-99ad-d80a6230481e service nova] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Received event network-changed-c74af0b7-ebfb-4563-9208-a18235899a6c {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1043.837224] env[69328]: DEBUG nova.compute.manager [req-3a29bd11-03c0-40e9-b513-21e16a06e080 req-c88bfedf-43e9-437b-99ad-d80a6230481e service nova] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Refreshing instance network info cache due to event network-changed-c74af0b7-ebfb-4563-9208-a18235899a6c. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1043.841023] env[69328]: DEBUG oslo_concurrency.lockutils [req-3a29bd11-03c0-40e9-b513-21e16a06e080 req-c88bfedf-43e9-437b-99ad-d80a6230481e service nova] Acquiring lock "refresh_cache-a0952fdf-5570-4112-bc4d-e9f9cee1599c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.841023] env[69328]: DEBUG oslo_concurrency.lockutils [req-3a29bd11-03c0-40e9-b513-21e16a06e080 req-c88bfedf-43e9-437b-99ad-d80a6230481e service nova] Acquired lock "refresh_cache-a0952fdf-5570-4112-bc4d-e9f9cee1599c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1043.841023] env[69328]: DEBUG nova.network.neutron [req-3a29bd11-03c0-40e9-b513-21e16a06e080 req-c88bfedf-43e9-437b-99ad-d80a6230481e service nova] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Refreshing network info cache for port c74af0b7-ebfb-4563-9208-a18235899a6c {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1043.844300] env[69328]: DEBUG oslo_concurrency.lockutils [None req-427d6a49-3d5a-4df6-8145-722a95c1f0b6 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1043.857440] env[69328]: DEBUG oslo_vmware.api [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273870, 'name': PowerOffVM_Task, 'duration_secs': 0.519732} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.858333] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1043.858333] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1043.862238] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0571759a-74bd-4cef-be7a-bae81d6d2606 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.872867] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1043.872867] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ee4948e1-59ab-47c5-8d43-2c33c2a77eff {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.962717] env[69328]: DEBUG oslo_vmware.api [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273874, 'name': ReconfigVM_Task, 'duration_secs': 0.491714} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.965727] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Reconfigured VM instance instance-00000063 to attach disk [datastore2] ac0f967d-18c8-45d8-94ca-829a1fe11451/ac0f967d-18c8-45d8-94ca-829a1fe11451.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1043.966414] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1043.966596] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1043.966762] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Deleting the datastore file [datastore2] 65e38a02-880b-46e2-8866-645a9fc17c7a {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1043.967366] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9ecb82e0-8dab-4965-8a7f-40c12cc75ac8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.969146] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c29c7f4c-36b1-41dc-bf1c-0389025ed737 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.975439] env[69328]: DEBUG oslo_vmware.api [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 1043.975439] env[69328]: value = "task-3273879" [ 1043.975439] env[69328]: _type = "Task" [ 1043.975439] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.976911] env[69328]: DEBUG oslo_vmware.api [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1043.976911] env[69328]: value = "task-3273878" [ 1043.976911] env[69328]: _type = "Task" [ 1043.976911] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.992156] env[69328]: DEBUG oslo_vmware.api [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273879, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.996576] env[69328]: DEBUG oslo_vmware.api [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273878, 'name': Rename_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.023021] env[69328]: DEBUG oslo_vmware.api [None req-9163baf9-03d1-4a5a-9933-2d2e53d64012 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Task: {'id': task-3273875, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.387358} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.024298] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-9163baf9-03d1-4a5a-9933-2d2e53d64012 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1044.024543] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9163baf9-03d1-4a5a-9933-2d2e53d64012 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1044.024750] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9163baf9-03d1-4a5a-9933-2d2e53d64012 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1044.024988] env[69328]: INFO nova.compute.manager [None req-9163baf9-03d1-4a5a-9933-2d2e53d64012 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1044.025199] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9163baf9-03d1-4a5a-9933-2d2e53d64012 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1044.025417] env[69328]: DEBUG nova.compute.manager [-] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1044.025512] env[69328]: DEBUG nova.network.neutron [-] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1044.085177] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-200139d8-1129-4be6-866d-f117119a8eb1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.093860] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9abfd321-6e58-4641-ad41-1dacdbdc1ee6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.145109] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb7c620d-ce57-479a-8e66-9f03fe905ff5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.150602] env[69328]: INFO nova.compute.manager [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Took 35.99 seconds to build instance. [ 1044.157498] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bf174f8-705f-445d-b68f-cf8dc3550330 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.172657] env[69328]: DEBUG nova.compute.provider_tree [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1044.268665] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273876, 'name': CreateVM_Task, 'duration_secs': 0.436265} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.268867] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1044.271349] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8868d8b6-e8a6-4c40-9bca-fb6ec2c24443" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.271349] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8868d8b6-e8a6-4c40-9bca-fb6ec2c24443" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1044.271349] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/8868d8b6-e8a6-4c40-9bca-fb6ec2c24443" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1044.271349] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dfc3ac3d-34a5-4534-b47b-de9ff09aeaef {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.276190] env[69328]: DEBUG oslo_vmware.api [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for the task: (returnval){ [ 1044.276190] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5247c24b-ef9e-fff8-c75c-895ee32910b9" [ 1044.276190] env[69328]: _type = "Task" [ 1044.276190] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.284448] env[69328]: DEBUG oslo_vmware.api [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5247c24b-ef9e-fff8-c75c-895ee32910b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.496504] env[69328]: DEBUG oslo_vmware.api [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273879, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.214518} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.497498] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1044.497498] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1044.497672] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1044.505475] env[69328]: DEBUG oslo_vmware.api [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273878, 'name': Rename_Task, 'duration_secs': 0.206081} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.505475] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1044.508742] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-733c2025-5a59-4806-907d-1be58c580690 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.514109] env[69328]: DEBUG oslo_vmware.api [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1044.514109] env[69328]: value = "task-3273880" [ 1044.514109] env[69328]: _type = "Task" [ 1044.514109] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.524828] env[69328]: DEBUG oslo_vmware.api [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273880, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.553582] env[69328]: DEBUG nova.compute.manager [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1044.596737] env[69328]: DEBUG nova.virt.hardware [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1044.597364] env[69328]: DEBUG nova.virt.hardware [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1044.597364] env[69328]: DEBUG nova.virt.hardware [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1044.597364] env[69328]: DEBUG nova.virt.hardware [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1044.597585] env[69328]: DEBUG nova.virt.hardware [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1044.597693] env[69328]: DEBUG nova.virt.hardware [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1044.597890] env[69328]: DEBUG nova.virt.hardware [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1044.598010] env[69328]: DEBUG nova.virt.hardware [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1044.598534] env[69328]: DEBUG 
nova.virt.hardware [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1044.598534] env[69328]: DEBUG nova.virt.hardware [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1044.598629] env[69328]: DEBUG nova.virt.hardware [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1044.600359] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fe9fdf1-4e7f-417d-a8b2-f7cb609eae65 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.609584] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de2e79d0-2533-4022-b795-95d52facb4b5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.638678] env[69328]: DEBUG nova.network.neutron [req-3a29bd11-03c0-40e9-b513-21e16a06e080 req-c88bfedf-43e9-437b-99ad-d80a6230481e service nova] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Updated VIF entry in instance network info cache for port c74af0b7-ebfb-4563-9208-a18235899a6c. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1044.638678] env[69328]: DEBUG nova.network.neutron [req-3a29bd11-03c0-40e9-b513-21e16a06e080 req-c88bfedf-43e9-437b-99ad-d80a6230481e service nova] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Updating instance_info_cache with network_info: [{"id": "c74af0b7-ebfb-4563-9208-a18235899a6c", "address": "fa:16:3e:35:bb:fc", "network": {"id": "cc75e08f-f0f3-4b52-9b40-0de73f044554", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1326858830-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1393040bf5304571ae4b66d0a4ee7b6e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc74af0b7-eb", "ovs_interfaceid": "c74af0b7-ebfb-4563-9208-a18235899a6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1044.657606] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4e288a53-00bb-4c32-9db1-a42b69ac4da0 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "6ad357d9-c35a-4fdb-8dd0-39a0617bf85e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.507s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1044.676754] env[69328]: DEBUG nova.scheduler.client.report [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1044.790064] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8868d8b6-e8a6-4c40-9bca-fb6ec2c24443" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1044.790337] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Processing image 
8868d8b6-e8a6-4c40-9bca-fb6ec2c24443 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1044.790610] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/8868d8b6-e8a6-4c40-9bca-fb6ec2c24443/8868d8b6-e8a6-4c40-9bca-fb6ec2c24443.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.790847] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Acquired lock "[datastore1] devstack-image-cache_base/8868d8b6-e8a6-4c40-9bca-fb6ec2c24443/8868d8b6-e8a6-4c40-9bca-fb6ec2c24443.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1044.790963] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1044.791257] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a39264e5-2035-4b29-bbcc-1228966869bf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.801380] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1044.801612] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1044.802405] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-182a451c-7b17-4635-ac76-b43cd1d7cfdc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.807774] env[69328]: DEBUG oslo_vmware.api [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for the task: (returnval){ [ 1044.807774] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f0f408-0f12-0988-ce25-f36515c66de9" [ 1044.807774] env[69328]: _type = "Task" [ 1044.807774] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.818575] env[69328]: DEBUG oslo_vmware.api [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f0f408-0f12-0988-ce25-f36515c66de9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.025626] env[69328]: DEBUG oslo_vmware.api [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273880, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.143344] env[69328]: DEBUG oslo_concurrency.lockutils [req-3a29bd11-03c0-40e9-b513-21e16a06e080 req-c88bfedf-43e9-437b-99ad-d80a6230481e service nova] Releasing lock "refresh_cache-a0952fdf-5570-4112-bc4d-e9f9cee1599c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1045.143344] env[69328]: DEBUG nova.compute.manager [req-3a29bd11-03c0-40e9-b513-21e16a06e080 req-c88bfedf-43e9-437b-99ad-d80a6230481e service nova] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Received event network-vif-deleted-5fae6e5f-3223-4872-83ba-b127e3c15d40 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1045.183462] env[69328]: DEBUG oslo_concurrency.lockutils [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.656s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1045.183979] env[69328]: DEBUG nova.compute.manager [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1045.186772] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.263s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1045.187208] env[69328]: DEBUG nova.objects.instance [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lazy-loading 'pci_requests' on Instance uuid 76210566-12d7-4f6a-afa1-6329e87e0f85 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1045.242787] env[69328]: DEBUG nova.network.neutron [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Successfully updated port: 14cfba2e-1458-4c09-a1bb-825784ca30af {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1045.258870] env[69328]: DEBUG nova.network.neutron [-] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.324109] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Preparing fetch location {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1045.324628] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Fetch image to [datastore1] OSTACK_IMG_93ca0648-addc-4dd4-9ca8-ca5b832efff4/OSTACK_IMG_93ca0648-addc-4dd4-9ca8-ca5b832efff4.vmdk {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1045.324726] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Downloading stream optimized image 8868d8b6-e8a6-4c40-9bca-fb6ec2c24443 to [datastore1] OSTACK_IMG_93ca0648-addc-4dd4-9ca8-ca5b832efff4/OSTACK_IMG_93ca0648-addc-4dd4-9ca8-ca5b832efff4.vmdk on the data store datastore1 as vApp {{(pid=69328) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1045.324920] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Downloading image file data 8868d8b6-e8a6-4c40-9bca-fb6ec2c24443 to the ESX as VM named 'OSTACK_IMG_93ca0648-addc-4dd4-9ca8-ca5b832efff4' {{(pid=69328) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1045.422232] env[69328]: DEBUG oslo_vmware.rw_handles [None req-d8312b11-704f-44de-8694-e9dcdc949eee 
tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1045.422232] env[69328]: value = "resgroup-9" [ 1045.422232] env[69328]: _type = "ResourcePool" [ 1045.422232] env[69328]: }. {{(pid=69328) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1045.422717] env[69328]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-29256152-1df1-46ec-bad4-63c6fcd09443 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.452197] env[69328]: DEBUG oslo_vmware.rw_handles [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Lease: (returnval){ [ 1045.452197] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]527212d6-adaf-f2e7-bcc9-407ff332c7e2" [ 1045.452197] env[69328]: _type = "HttpNfcLease" [ 1045.452197] env[69328]: } obtained for vApp import into resource pool (val){ [ 1045.452197] env[69328]: value = "resgroup-9" [ 1045.452197] env[69328]: _type = "ResourcePool" [ 1045.452197] env[69328]: }. {{(pid=69328) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1045.452197] env[69328]: DEBUG oslo_vmware.api [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for the lease: (returnval){ [ 1045.452197] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]527212d6-adaf-f2e7-bcc9-407ff332c7e2" [ 1045.452197] env[69328]: _type = "HttpNfcLease" [ 1045.452197] env[69328]: } to be ready. {{(pid=69328) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1045.459792] env[69328]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1045.459792] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]527212d6-adaf-f2e7-bcc9-407ff332c7e2" [ 1045.459792] env[69328]: _type = "HttpNfcLease" [ 1045.459792] env[69328]: } is initializing. {{(pid=69328) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1045.527085] env[69328]: DEBUG oslo_vmware.api [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273880, 'name': PowerOnVM_Task, 'duration_secs': 0.864986} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.527380] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1045.527624] env[69328]: INFO nova.compute.manager [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Took 8.81 seconds to spawn the instance on the hypervisor. 
[ 1045.527816] env[69328]: DEBUG nova.compute.manager [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1045.529145] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f3af0d0-e228-4c45-bc8c-4f5918c3d273 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.546535] env[69328]: DEBUG nova.virt.hardware [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1045.546925] env[69328]: DEBUG nova.virt.hardware [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1045.550101] env[69328]: DEBUG nova.virt.hardware [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1045.550101] env[69328]: DEBUG nova.virt.hardware [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1045.550101] env[69328]: DEBUG nova.virt.hardware [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1045.550101] env[69328]: DEBUG nova.virt.hardware [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1045.550101] env[69328]: DEBUG nova.virt.hardware [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1045.550101] env[69328]: DEBUG nova.virt.hardware [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1045.550101] env[69328]: DEBUG nova.virt.hardware [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1045.550101] env[69328]: DEBUG nova.virt.hardware [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1045.550101] env[69328]: DEBUG nova.virt.hardware [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1045.550101] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8283cc2-d6ad-4557-85bb-cd37a8d50128 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.560370] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c34ce5-02e1-4129-a845-6663efb9c2bc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.574172] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:db:a0:8f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '62237242-7ce2-4664-a1c5-6783b516b507', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dce2dda5-86c0-4f69-a3c1-c0f7a5c3b56e', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1045.583629] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1045.584919] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1045.585508] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bc77b79d-12df-4a81-b579-fea6df7a36c4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.610066] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1045.610066] env[69328]: value = "task-3273882" [ 1045.610066] env[69328]: _type = "Task" [ 1045.610066] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.618862] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273882, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.690289] env[69328]: DEBUG nova.compute.utils [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1045.693234] env[69328]: DEBUG nova.objects.instance [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lazy-loading 'numa_topology' on Instance uuid 76210566-12d7-4f6a-afa1-6329e87e0f85 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1045.694432] env[69328]: DEBUG nova.compute.manager [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1045.694605] env[69328]: DEBUG nova.network.neutron [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1045.743175] env[69328]: DEBUG nova.policy [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '929ab12fcdb943a48039c7508e6a0b35', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '088bc9e3aeb449baa0a522342d57d183', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 1045.745240] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "refresh_cache-14521ee3-d749-48b4-aeec-23c94ca2cf9f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.745378] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquired lock "refresh_cache-14521ee3-d749-48b4-aeec-23c94ca2cf9f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1045.745438] env[69328]: DEBUG nova.network.neutron [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1045.759463] env[69328]: INFO nova.compute.manager [-] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Took 1.73 seconds to deallocate network for instance. 
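
Several of the acquire/release pairs above ("refresh_cache-<uuid>", "compute_resources", the image-cache datastore path) come from oslo.concurrency's lockutils. A minimal sketch of the context-manager form those records correspond to is shown below; the lock name pattern mirrors the log, while the function and callback names are placeholders, not Nova's actual helpers.

    from oslo_concurrency import lockutils

    def refresh_cache(instance_uuid, refresh_fn):
        # Serialize refreshes of one instance's network-info cache; this is
        # the lock()/release pattern behind the "Acquiring lock
        # 'refresh_cache-<uuid>'" and "Releasing lock" records above.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            return refresh_fn(instance_uuid)
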
[ 1045.960143] env[69328]: DEBUG nova.compute.manager [req-001bc5fc-8f12-43d4-bf31-1a4ae1796c94 req-f10dea6f-b3e7-40d0-ac9d-7008d068555c service nova] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Received event network-vif-deleted-3b413041-b9e3-47e2-a4f8-f828e31f079a {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1045.960143] env[69328]: DEBUG nova.compute.manager [req-001bc5fc-8f12-43d4-bf31-1a4ae1796c94 req-f10dea6f-b3e7-40d0-ac9d-7008d068555c service nova] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Received event network-vif-plugged-14cfba2e-1458-4c09-a1bb-825784ca30af {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1045.960143] env[69328]: DEBUG oslo_concurrency.lockutils [req-001bc5fc-8f12-43d4-bf31-1a4ae1796c94 req-f10dea6f-b3e7-40d0-ac9d-7008d068555c service nova] Acquiring lock "14521ee3-d749-48b4-aeec-23c94ca2cf9f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1045.962157] env[69328]: DEBUG oslo_concurrency.lockutils [req-001bc5fc-8f12-43d4-bf31-1a4ae1796c94 req-f10dea6f-b3e7-40d0-ac9d-7008d068555c service nova] Lock "14521ee3-d749-48b4-aeec-23c94ca2cf9f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1045.963858] env[69328]: DEBUG oslo_concurrency.lockutils [req-001bc5fc-8f12-43d4-bf31-1a4ae1796c94 req-f10dea6f-b3e7-40d0-ac9d-7008d068555c service nova] Lock "14521ee3-d749-48b4-aeec-23c94ca2cf9f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1045.964204] env[69328]: DEBUG nova.compute.manager [req-001bc5fc-8f12-43d4-bf31-1a4ae1796c94 req-f10dea6f-b3e7-40d0-ac9d-7008d068555c service nova] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] No waiting events found dispatching network-vif-plugged-14cfba2e-1458-4c09-a1bb-825784ca30af {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1045.964504] env[69328]: WARNING nova.compute.manager [req-001bc5fc-8f12-43d4-bf31-1a4ae1796c94 req-f10dea6f-b3e7-40d0-ac9d-7008d068555c service nova] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Received unexpected event network-vif-plugged-14cfba2e-1458-4c09-a1bb-825784ca30af for instance with vm_state building and task_state spawning. [ 1045.965434] env[69328]: DEBUG nova.compute.manager [req-001bc5fc-8f12-43d4-bf31-1a4ae1796c94 req-f10dea6f-b3e7-40d0-ac9d-7008d068555c service nova] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Received event network-changed-14cfba2e-1458-4c09-a1bb-825784ca30af {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1045.965434] env[69328]: DEBUG nova.compute.manager [req-001bc5fc-8f12-43d4-bf31-1a4ae1796c94 req-f10dea6f-b3e7-40d0-ac9d-7008d068555c service nova] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Refreshing instance network info cache due to event network-changed-14cfba2e-1458-4c09-a1bb-825784ca30af. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1045.965434] env[69328]: DEBUG oslo_concurrency.lockutils [req-001bc5fc-8f12-43d4-bf31-1a4ae1796c94 req-f10dea6f-b3e7-40d0-ac9d-7008d068555c service nova] Acquiring lock "refresh_cache-14521ee3-d749-48b4-aeec-23c94ca2cf9f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.975822] env[69328]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1045.975822] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]527212d6-adaf-f2e7-bcc9-407ff332c7e2" [ 1045.975822] env[69328]: _type = "HttpNfcLease" [ 1045.975822] env[69328]: } is initializing. {{(pid=69328) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1046.054390] env[69328]: INFO nova.compute.manager [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Took 36.63 seconds to build instance. [ 1046.124009] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273882, 'name': CreateVM_Task, 'duration_secs': 0.421243} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.124201] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1046.124908] env[69328]: DEBUG oslo_concurrency.lockutils [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.125151] env[69328]: DEBUG oslo_concurrency.lockutils [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1046.125486] env[69328]: DEBUG oslo_concurrency.lockutils [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1046.125754] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28c2e8b1-afd8-4442-ab99-a9b42cf804aa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.131756] env[69328]: DEBUG oslo_vmware.api [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 1046.131756] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]529555fc-33e9-57bf-d83e-49ffea05162a" [ 1046.131756] env[69328]: _type = "Task" [ 1046.131756] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.141281] env[69328]: DEBUG oslo_vmware.api [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]529555fc-33e9-57bf-d83e-49ffea05162a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.190212] env[69328]: DEBUG nova.network.neutron [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Successfully created port: 61188e10-aa7e-4ec8-99f4-bc6a8380b3be {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1046.195209] env[69328]: DEBUG nova.compute.manager [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1046.198533] env[69328]: INFO nova.compute.claims [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1046.266838] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9163baf9-03d1-4a5a-9933-2d2e53d64012 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1046.286842] env[69328]: DEBUG nova.compute.manager [None req-b179d108-cdfc-4430-8ebc-3ed518c246d8 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1046.288119] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7a87798-7203-435e-8cf8-1c9f5e4f1bea {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.300411] env[69328]: DEBUG nova.network.neutron [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1046.469153] env[69328]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1046.469153] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]527212d6-adaf-f2e7-bcc9-407ff332c7e2" [ 1046.469153] env[69328]: _type = "HttpNfcLease" [ 1046.469153] env[69328]: } is ready. 
{{(pid=69328) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1046.469474] env[69328]: DEBUG oslo_vmware.rw_handles [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1046.469474] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]527212d6-adaf-f2e7-bcc9-407ff332c7e2" [ 1046.469474] env[69328]: _type = "HttpNfcLease" [ 1046.469474] env[69328]: }. {{(pid=69328) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1046.471038] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54b42ac0-7eff-4aa5-be0c-6a287dcde047 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.480378] env[69328]: DEBUG oslo_vmware.rw_handles [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524eef70-f1cf-12bd-338d-20b2f37f5eaf/disk-0.vmdk from lease info. {{(pid=69328) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1046.480378] env[69328]: DEBUG oslo_vmware.rw_handles [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Creating HTTP connection to write to file with size = 31663616 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524eef70-f1cf-12bd-338d-20b2f37f5eaf/disk-0.vmdk. 
{{(pid=69328) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1046.559046] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ba7c1555-f5df-4f7f-9137-d51b5d238b9b tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "ac0f967d-18c8-45d8-94ca-829a1fe11451" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.139s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1046.559372] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Acquiring lock "275ef1ed-8e60-4151-b548-e22e5bd8efe2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1046.559581] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Lock "275ef1ed-8e60-4151-b548-e22e5bd8efe2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1046.564799] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-05144463-89d2-4b78-9047-b918c99bb0ef {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.636639] env[69328]: DEBUG nova.network.neutron [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Updating instance_info_cache with network_info: [{"id": "14cfba2e-1458-4c09-a1bb-825784ca30af", "address": "fa:16:3e:fa:9e:af", "network": {"id": "e2ab6957-2c9d-4b95-91bd-5a62d9a284ba", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-967470666-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75d5853e3c724d02bacfa75173e38ab3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14cfba2e-14", "ovs_interfaceid": "14cfba2e-1458-4c09-a1bb-825784ca30af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.645804] env[69328]: DEBUG oslo_vmware.api [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 
tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]529555fc-33e9-57bf-d83e-49ffea05162a, 'name': SearchDatastore_Task, 'duration_secs': 0.011163} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.646123] env[69328]: DEBUG oslo_concurrency.lockutils [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1046.646320] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1046.646638] env[69328]: DEBUG oslo_concurrency.lockutils [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.646737] env[69328]: DEBUG oslo_concurrency.lockutils [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1046.646944] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1046.647368] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c9ec084e-2932-4a5e-8516-794de9968d35 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.656985] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1046.657224] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1046.657905] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e8e7ef8-0075-4f19-9478-c4dee8bfe98a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.663520] env[69328]: DEBUG oslo_vmware.api [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 1046.663520] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b8cf1b-0495-cdff-71f2-a7e40e926c86" [ 1046.663520] env[69328]: _type = "Task" [ 1046.663520] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.672540] env[69328]: DEBUG oslo_vmware.api [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b8cf1b-0495-cdff-71f2-a7e40e926c86, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.799553] env[69328]: INFO nova.compute.manager [None req-b179d108-cdfc-4430-8ebc-3ed518c246d8 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] instance snapshotting [ 1046.804033] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0993660-fd65-49f8-a3a8-a0d8daaf0592 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.823491] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f600593-fe09-44bc-aea3-3b770d0bf214 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.066164] env[69328]: DEBUG nova.compute.manager [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Starting instance... 
{{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1047.139368] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Releasing lock "refresh_cache-14521ee3-d749-48b4-aeec-23c94ca2cf9f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1047.139922] env[69328]: DEBUG nova.compute.manager [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Instance network_info: |[{"id": "14cfba2e-1458-4c09-a1bb-825784ca30af", "address": "fa:16:3e:fa:9e:af", "network": {"id": "e2ab6957-2c9d-4b95-91bd-5a62d9a284ba", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-967470666-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75d5853e3c724d02bacfa75173e38ab3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14cfba2e-14", "ovs_interfaceid": "14cfba2e-1458-4c09-a1bb-825784ca30af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1047.140468] env[69328]: DEBUG oslo_concurrency.lockutils [req-001bc5fc-8f12-43d4-bf31-1a4ae1796c94 req-f10dea6f-b3e7-40d0-ac9d-7008d068555c service nova] Acquired lock "refresh_cache-14521ee3-d749-48b4-aeec-23c94ca2cf9f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1047.141965] env[69328]: DEBUG nova.network.neutron [req-001bc5fc-8f12-43d4-bf31-1a4ae1796c94 req-f10dea6f-b3e7-40d0-ac9d-7008d068555c service nova] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Refreshing network info cache for port 14cfba2e-1458-4c09-a1bb-825784ca30af {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1047.142603] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fa:9e:af', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'abcf0d10-3f3f-45dc-923e-1c78766e2dad', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '14cfba2e-1458-4c09-a1bb-825784ca30af', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1047.152555] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 
tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1047.153945] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1047.154231] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e9a41b2d-c081-4931-9fc8-1e21d109b990 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.187946] env[69328]: DEBUG oslo_vmware.api [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b8cf1b-0495-cdff-71f2-a7e40e926c86, 'name': SearchDatastore_Task, 'duration_secs': 0.009271} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.192037] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1047.192037] env[69328]: value = "task-3273883" [ 1047.192037] env[69328]: _type = "Task" [ 1047.192037] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.192037] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-456f537f-d9ca-4467-b397-37d5fa31111f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.200375] env[69328]: DEBUG oslo_vmware.api [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 1047.200375] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52dd99b1-2cbd-c5df-38a0-57d51ca0873c" [ 1047.200375] env[69328]: _type = "Task" [ 1047.200375] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.203795] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273883, 'name': CreateVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.210663] env[69328]: DEBUG nova.compute.manager [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1047.218920] env[69328]: DEBUG oslo_vmware.api [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52dd99b1-2cbd-c5df-38a0-57d51ca0873c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.244662] env[69328]: DEBUG nova.virt.hardware [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1047.245067] env[69328]: DEBUG nova.virt.hardware [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1047.245296] env[69328]: DEBUG nova.virt.hardware [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1047.245535] env[69328]: DEBUG nova.virt.hardware [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1047.245776] env[69328]: DEBUG nova.virt.hardware [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1047.245998] env[69328]: DEBUG nova.virt.hardware [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1047.246352] env[69328]: DEBUG nova.virt.hardware [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1047.246707] env[69328]: DEBUG nova.virt.hardware [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
1047.247037] env[69328]: DEBUG nova.virt.hardware [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1047.247290] env[69328]: DEBUG nova.virt.hardware [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1047.247535] env[69328]: DEBUG nova.virt.hardware [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1047.248987] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7f362bf-b1ea-4bde-a6e7-49f1005d126e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.261119] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c22c1c50-0cc0-470c-984a-97862517b2a8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.334122] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b179d108-cdfc-4430-8ebc-3ed518c246d8 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Creating Snapshot of the VM instance {{(pid=69328) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1047.334468] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-99dd21fa-403e-4a57-8cc6-f4504944664b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.342782] env[69328]: DEBUG oslo_vmware.api [None req-b179d108-cdfc-4430-8ebc-3ed518c246d8 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 1047.342782] env[69328]: value = "task-3273884" [ 1047.342782] env[69328]: _type = "Task" [ 1047.342782] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.356186] env[69328]: DEBUG oslo_vmware.api [None req-b179d108-cdfc-4430-8ebc-3ed518c246d8 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273884, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.590935] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1047.675680] env[69328]: DEBUG oslo_concurrency.lockutils [None req-94a5dc04-40f3-46a5-8303-2e27cc56aed2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "ac0f967d-18c8-45d8-94ca-829a1fe11451" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1047.675680] env[69328]: DEBUG oslo_concurrency.lockutils [None req-94a5dc04-40f3-46a5-8303-2e27cc56aed2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "ac0f967d-18c8-45d8-94ca-829a1fe11451" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1047.675854] env[69328]: DEBUG oslo_concurrency.lockutils [None req-94a5dc04-40f3-46a5-8303-2e27cc56aed2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "ac0f967d-18c8-45d8-94ca-829a1fe11451-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1047.675951] env[69328]: DEBUG oslo_concurrency.lockutils [None req-94a5dc04-40f3-46a5-8303-2e27cc56aed2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "ac0f967d-18c8-45d8-94ca-829a1fe11451-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1047.676108] env[69328]: DEBUG oslo_concurrency.lockutils [None req-94a5dc04-40f3-46a5-8303-2e27cc56aed2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "ac0f967d-18c8-45d8-94ca-829a1fe11451-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1047.679719] env[69328]: INFO nova.compute.manager [None req-94a5dc04-40f3-46a5-8303-2e27cc56aed2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Terminating instance [ 1047.707548] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273883, 'name': CreateVM_Task, 'duration_secs': 0.377697} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.714235] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1047.715266] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-000fcd1b-964b-45ec-8e92-16c304de43f6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.723730] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1047.723982] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1047.724866] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1047.726247] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1524d0e0-206e-4c0c-b6d0-258d8392da60 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.735569] env[69328]: DEBUG oslo_vmware.api [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52dd99b1-2cbd-c5df-38a0-57d51ca0873c, 'name': SearchDatastore_Task, 'duration_secs': 0.016993} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.738337] env[69328]: DEBUG oslo_concurrency.lockutils [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1047.738628] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 65e38a02-880b-46e2-8866-645a9fc17c7a/65e38a02-880b-46e2-8866-645a9fc17c7a.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1047.739030] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e3010233-72bc-4e34-8b5b-0a8ec550194e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.741948] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42b2163e-4f67-44a2-a728-84f82acebd9d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.749566] env[69328]: DEBUG oslo_vmware.api [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 1047.749566] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c6844d-e4a4-77cb-add9-400f011813d8" [ 1047.749566] env[69328]: _type = "Task" [ 1047.749566] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.780861] env[69328]: DEBUG oslo_vmware.api [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 1047.780861] env[69328]: value = "task-3273885" [ 1047.780861] env[69328]: _type = "Task" [ 1047.780861] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.785946] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18d00980-0985-4674-9534-d9f2a28afe14 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.797567] env[69328]: DEBUG oslo_vmware.api [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c6844d-e4a4-77cb-add9-400f011813d8, 'name': SearchDatastore_Task, 'duration_secs': 0.011661} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.800840] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1047.801141] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1047.801435] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1047.801609] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1047.801871] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1047.806137] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c8b3cd78-c93b-40e6-8a1b-8e2f00cad8d4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.817186] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-529bad4c-2185-4a3c-afc2-68c1c99218c6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.822372] env[69328]: DEBUG oslo_vmware.api [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273885, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.836188] env[69328]: DEBUG nova.compute.provider_tree [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1047.842555] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1047.842555] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1047.846995] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9f0c4ff-97e2-4864-b5bb-aa80eb4ab04e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.864474] env[69328]: DEBUG oslo_vmware.api [None req-b179d108-cdfc-4430-8ebc-3ed518c246d8 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273884, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.868438] env[69328]: DEBUG oslo_vmware.api [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 1047.868438] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5236174d-9e9b-c569-ee9a-ff6ef8a42c66" [ 1047.868438] env[69328]: _type = "Task" [ 1047.868438] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.882078] env[69328]: DEBUG oslo_vmware.api [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5236174d-9e9b-c569-ee9a-ff6ef8a42c66, 'name': SearchDatastore_Task, 'duration_secs': 0.01093} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.884636] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c3eb73b-47d6-433d-adb3-8374fae7e8f2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.890390] env[69328]: DEBUG oslo_vmware.api [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 1047.890390] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5256f079-f8bc-8635-d34e-15414f1f08c6" [ 1047.890390] env[69328]: _type = "Task" [ 1047.890390] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.906891] env[69328]: DEBUG oslo_vmware.api [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5256f079-f8bc-8635-d34e-15414f1f08c6, 'name': SearchDatastore_Task, 'duration_secs': 0.011395} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.909293] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1047.909619] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 14521ee3-d749-48b4-aeec-23c94ca2cf9f/14521ee3-d749-48b4-aeec-23c94ca2cf9f.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1047.909960] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a37a8872-6d74-4810-bb8b-5da397258b71 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.917072] env[69328]: DEBUG oslo_vmware.api [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 1047.917072] env[69328]: value = "task-3273886" [ 1047.917072] env[69328]: _type = "Task" [ 1047.917072] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.929219] env[69328]: DEBUG oslo_vmware.api [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273886, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.125367] env[69328]: DEBUG nova.compute.manager [req-602c9cdb-03c4-4a4a-88ca-2b14e6789481 req-384e096b-10b5-45e3-a4c8-936059be615b service nova] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Received event network-vif-plugged-61188e10-aa7e-4ec8-99f4-bc6a8380b3be {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1048.125732] env[69328]: DEBUG oslo_concurrency.lockutils [req-602c9cdb-03c4-4a4a-88ca-2b14e6789481 req-384e096b-10b5-45e3-a4c8-936059be615b service nova] Acquiring lock "071c1837-9d0b-4b69-b16e-991b300385fb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1048.125837] env[69328]: DEBUG oslo_concurrency.lockutils [req-602c9cdb-03c4-4a4a-88ca-2b14e6789481 req-384e096b-10b5-45e3-a4c8-936059be615b service nova] Lock "071c1837-9d0b-4b69-b16e-991b300385fb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1048.126603] env[69328]: DEBUG oslo_concurrency.lockutils [req-602c9cdb-03c4-4a4a-88ca-2b14e6789481 req-384e096b-10b5-45e3-a4c8-936059be615b service nova] Lock "071c1837-9d0b-4b69-b16e-991b300385fb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1048.126603] env[69328]: DEBUG nova.compute.manager [req-602c9cdb-03c4-4a4a-88ca-2b14e6789481 req-384e096b-10b5-45e3-a4c8-936059be615b service nova] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] No waiting events found dispatching network-vif-plugged-61188e10-aa7e-4ec8-99f4-bc6a8380b3be {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1048.126603] env[69328]: WARNING nova.compute.manager [req-602c9cdb-03c4-4a4a-88ca-2b14e6789481 req-384e096b-10b5-45e3-a4c8-936059be615b service nova] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Received unexpected event network-vif-plugged-61188e10-aa7e-4ec8-99f4-bc6a8380b3be for instance with vm_state building and task_state spawning. [ 1048.185650] env[69328]: DEBUG nova.compute.manager [None req-94a5dc04-40f3-46a5-8303-2e27cc56aed2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1048.185961] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-94a5dc04-40f3-46a5-8303-2e27cc56aed2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1048.187562] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df2ad998-d122-4d52-b86e-154201011ae9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.197252] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-94a5dc04-40f3-46a5-8303-2e27cc56aed2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1048.198179] env[69328]: DEBUG nova.network.neutron [req-001bc5fc-8f12-43d4-bf31-1a4ae1796c94 req-f10dea6f-b3e7-40d0-ac9d-7008d068555c service nova] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Updated VIF entry in instance network info cache for port 14cfba2e-1458-4c09-a1bb-825784ca30af. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1048.198641] env[69328]: DEBUG nova.network.neutron [req-001bc5fc-8f12-43d4-bf31-1a4ae1796c94 req-f10dea6f-b3e7-40d0-ac9d-7008d068555c service nova] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Updating instance_info_cache with network_info: [{"id": "14cfba2e-1458-4c09-a1bb-825784ca30af", "address": "fa:16:3e:fa:9e:af", "network": {"id": "e2ab6957-2c9d-4b95-91bd-5a62d9a284ba", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-967470666-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75d5853e3c724d02bacfa75173e38ab3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abcf0d10-3f3f-45dc-923e-1c78766e2dad", "external-id": "nsx-vlan-transportzone-405", "segmentation_id": 405, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14cfba2e-14", "ovs_interfaceid": "14cfba2e-1458-4c09-a1bb-825784ca30af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1048.200216] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a21167c9-f589-4fc7-a4d8-155629a0555c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.208152] env[69328]: DEBUG oslo_vmware.api [None req-94a5dc04-40f3-46a5-8303-2e27cc56aed2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1048.208152] env[69328]: value = "task-3273887" [ 1048.208152] env[69328]: _type = "Task" [ 1048.208152] env[69328]: } to 
complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.220949] env[69328]: DEBUG oslo_vmware.api [None req-94a5dc04-40f3-46a5-8303-2e27cc56aed2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273887, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.299179] env[69328]: DEBUG nova.network.neutron [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Successfully updated port: 61188e10-aa7e-4ec8-99f4-bc6a8380b3be {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1048.305269] env[69328]: DEBUG oslo_vmware.api [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273885, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.341932] env[69328]: DEBUG nova.scheduler.client.report [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1048.360686] env[69328]: DEBUG oslo_vmware.api [None req-b179d108-cdfc-4430-8ebc-3ed518c246d8 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273884, 'name': CreateSnapshot_Task, 'duration_secs': 0.618259} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.362596] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b179d108-cdfc-4430-8ebc-3ed518c246d8 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Created Snapshot of the VM instance {{(pid=69328) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1048.364082] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-964ce09f-72a8-4935-b631-885aa1ba5257 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.431603] env[69328]: DEBUG oslo_vmware.api [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273886, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.703045] env[69328]: DEBUG oslo_concurrency.lockutils [req-001bc5fc-8f12-43d4-bf31-1a4ae1796c94 req-f10dea6f-b3e7-40d0-ac9d-7008d068555c service nova] Releasing lock "refresh_cache-14521ee3-d749-48b4-aeec-23c94ca2cf9f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1048.722787] env[69328]: DEBUG oslo_vmware.api [None req-94a5dc04-40f3-46a5-8303-2e27cc56aed2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273887, 'name': PowerOffVM_Task, 'duration_secs': 0.254089} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.723181] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-94a5dc04-40f3-46a5-8303-2e27cc56aed2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1048.723380] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-94a5dc04-40f3-46a5-8303-2e27cc56aed2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1048.723726] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6067cdff-a40d-4fa0-840f-1a83908e88d9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.747532] env[69328]: DEBUG oslo_vmware.rw_handles [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Completed reading data from the image iterator. {{(pid=69328) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1048.747875] env[69328]: DEBUG oslo_vmware.rw_handles [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524eef70-f1cf-12bd-338d-20b2f37f5eaf/disk-0.vmdk. {{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1048.748985] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7c1c804-cd02-4874-9f43-c0402a6c9105 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.756692] env[69328]: DEBUG oslo_vmware.rw_handles [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524eef70-f1cf-12bd-338d-20b2f37f5eaf/disk-0.vmdk is in state: ready. 
{{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1048.757093] env[69328]: DEBUG oslo_vmware.rw_handles [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524eef70-f1cf-12bd-338d-20b2f37f5eaf/disk-0.vmdk. {{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1048.758461] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-76f181a7-8675-4c72-a5fa-2e0041adc7aa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.801330] env[69328]: DEBUG oslo_vmware.api [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273885, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.556456} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.801630] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 65e38a02-880b-46e2-8866-645a9fc17c7a/65e38a02-880b-46e2-8866-645a9fc17c7a.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1048.801909] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1048.802292] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-06249737-b860-4dc9-9442-4debe4ce069b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.809880] env[69328]: DEBUG oslo_concurrency.lockutils [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "refresh_cache-071c1837-9d0b-4b69-b16e-991b300385fb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1048.810089] env[69328]: DEBUG oslo_concurrency.lockutils [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquired lock "refresh_cache-071c1837-9d0b-4b69-b16e-991b300385fb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1048.810180] env[69328]: DEBUG nova.network.neutron [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1048.811596] env[69328]: 
DEBUG oslo_vmware.api [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 1048.811596] env[69328]: value = "task-3273889" [ 1048.811596] env[69328]: _type = "Task" [ 1048.811596] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.819090] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-94a5dc04-40f3-46a5-8303-2e27cc56aed2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1048.819236] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-94a5dc04-40f3-46a5-8303-2e27cc56aed2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1048.819405] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-94a5dc04-40f3-46a5-8303-2e27cc56aed2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Deleting the datastore file [datastore2] ac0f967d-18c8-45d8-94ca-829a1fe11451 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1048.819742] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9193adde-a34b-4967-92df-2cfd9860bf30 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.825420] env[69328]: DEBUG oslo_vmware.api [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273889, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.832474] env[69328]: DEBUG oslo_vmware.api [None req-94a5dc04-40f3-46a5-8303-2e27cc56aed2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1048.832474] env[69328]: value = "task-3273890" [ 1048.832474] env[69328]: _type = "Task" [ 1048.832474] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.844059] env[69328]: DEBUG oslo_vmware.api [None req-94a5dc04-40f3-46a5-8303-2e27cc56aed2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273890, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.847482] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.661s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1048.850110] env[69328]: DEBUG oslo_concurrency.lockutils [None req-81ca0695-bba3-49b9-ac61-689ae87a5224 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.939s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1048.850419] env[69328]: DEBUG nova.objects.instance [None req-81ca0695-bba3-49b9-ac61-689ae87a5224 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lazy-loading 'resources' on Instance uuid 3ba646e8-a5c8-4917-a1c4-32b37affb598 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1048.884725] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b179d108-cdfc-4430-8ebc-3ed518c246d8 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Creating linked-clone VM from snapshot {{(pid=69328) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1048.885527] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-442db09f-911f-4a35-bd95-7982962bba79 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.891625] env[69328]: INFO nova.network.neutron [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Updating port a3cab44b-0572-4007-bab9-e84ba084f70a with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1048.897082] env[69328]: DEBUG oslo_vmware.api [None req-b179d108-cdfc-4430-8ebc-3ed518c246d8 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 1048.897082] env[69328]: value = "task-3273891" [ 1048.897082] env[69328]: _type = "Task" [ 1048.897082] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.908465] env[69328]: DEBUG oslo_vmware.api [None req-b179d108-cdfc-4430-8ebc-3ed518c246d8 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273891, 'name': CloneVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.931818] env[69328]: DEBUG oslo_vmware.api [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273886, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.286863] env[69328]: DEBUG oslo_vmware.rw_handles [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524eef70-f1cf-12bd-338d-20b2f37f5eaf/disk-0.vmdk. {{(pid=69328) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1049.287207] env[69328]: INFO nova.virt.vmwareapi.images [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Downloaded image file data 8868d8b6-e8a6-4c40-9bca-fb6ec2c24443 [ 1049.288070] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7b1954d-5875-4e9b-b1a5-d26cf8ab0ed5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.304519] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1fb2ef64-6b14-4ef3-a0b0-82a50d59209b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.323049] env[69328]: DEBUG oslo_vmware.api [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273889, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.097237} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.323332] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1049.324168] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffb8c6c4-3792-4917-a7bf-28e1aa50a51f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.346642] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] 65e38a02-880b-46e2-8866-645a9fc17c7a/65e38a02-880b-46e2-8866-645a9fc17c7a.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1049.350281] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-17c27053-66de-4959-8599-816131a2d9aa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.364850] env[69328]: DEBUG nova.network.neutron [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1049.380971] env[69328]: DEBUG oslo_vmware.api [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 1049.380971] env[69328]: value = "task-3273893" [ 1049.380971] env[69328]: _type = "Task" [ 1049.380971] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.383147] env[69328]: INFO nova.virt.vmwareapi.images [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] The imported VM was unregistered [ 1049.386579] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Caching image {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1049.386864] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Creating directory with path [datastore1] devstack-image-cache_base/8868d8b6-e8a6-4c40-9bca-fb6ec2c24443 {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1049.387848] env[69328]: DEBUG oslo_vmware.api [None req-94a5dc04-40f3-46a5-8303-2e27cc56aed2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273890, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.236803} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.391143] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-50dac2c3-3cdf-49c7-bc3a-6c5f41fc1f1f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.393044] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-94a5dc04-40f3-46a5-8303-2e27cc56aed2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1049.393853] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-94a5dc04-40f3-46a5-8303-2e27cc56aed2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1049.393853] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-94a5dc04-40f3-46a5-8303-2e27cc56aed2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1049.393853] env[69328]: INFO nova.compute.manager [None req-94a5dc04-40f3-46a5-8303-2e27cc56aed2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Took 1.21 seconds to destroy the instance on the hypervisor. 
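Annotation (not part of the log): the records above for instance ac0f967d (task-3273887 through task-3273890) are the vmwareapi destroy path — power off, unregister, then delete the instance directory from the datastore. A minimal sketch of that sequence expressed directly against oslo.vmware follows; `session` is assumed to be an already-created oslo_vmware.api.VMwareAPISession, and `vm_ref`, `ds_path` and `dc_ref` are assumed to have been resolved earlier (the function name is illustrative, not Nova's own helper).

# Illustrative sketch only, not Nova source code.
def destroy_on_hypervisor(session, vm_ref, ds_path, dc_ref):
    vim = session.vim

    # Power off: the call returns a task moref which wait_for_task() polls,
    # producing the "Task: {... 'name': PowerOffVM_Task} progress is 0%" lines.
    task = session.invoke_api(vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

    # UnregisterVM is a plain method call, not a task.
    session.invoke_api(vim, 'UnregisterVM', vm_ref)

    # Remove the instance directory from the datastore (DeleteDatastoreFile_Task).
    file_manager = vim.service_content.fileManager
    task = session.invoke_api(vim, 'DeleteDatastoreFile_Task', file_manager,
                              name=ds_path, datacenter=dc_ref)
    session.wait_for_task(task)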
[ 1049.393853] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-94a5dc04-40f3-46a5-8303-2e27cc56aed2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1049.396276] env[69328]: DEBUG nova.compute.manager [-] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1049.396369] env[69328]: DEBUG nova.network.neutron [-] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1049.411078] env[69328]: DEBUG oslo_vmware.api [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273893, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.413186] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Created directory with path [datastore1] devstack-image-cache_base/8868d8b6-e8a6-4c40-9bca-fb6ec2c24443 {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1049.413186] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_93ca0648-addc-4dd4-9ca8-ca5b832efff4/OSTACK_IMG_93ca0648-addc-4dd4-9ca8-ca5b832efff4.vmdk to [datastore1] devstack-image-cache_base/8868d8b6-e8a6-4c40-9bca-fb6ec2c24443/8868d8b6-e8a6-4c40-9bca-fb6ec2c24443.vmdk. {{(pid=69328) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1049.413467] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-398e9f3a-8987-45e5-acf5-ee50c0343442 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.419413] env[69328]: DEBUG oslo_vmware.api [None req-b179d108-cdfc-4430-8ebc-3ed518c246d8 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273891, 'name': CloneVM_Task} progress is 94%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.427221] env[69328]: DEBUG oslo_vmware.api [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for the task: (returnval){ [ 1049.427221] env[69328]: value = "task-3273894" [ 1049.427221] env[69328]: _type = "Task" [ 1049.427221] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.434295] env[69328]: DEBUG oslo_vmware.api [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273886, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.022739} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.434503] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 14521ee3-d749-48b4-aeec-23c94ca2cf9f/14521ee3-d749-48b4-aeec-23c94ca2cf9f.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1049.434892] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1049.435020] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ec4ecd57-26d4-4faa-a843-ba8d43fa44ee {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.440060] env[69328]: DEBUG oslo_vmware.api [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273894, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.446178] env[69328]: DEBUG oslo_vmware.api [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 1049.446178] env[69328]: value = "task-3273895" [ 1049.446178] env[69328]: _type = "Task" [ 1049.446178] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.455120] env[69328]: DEBUG oslo_vmware.api [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273895, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.603330] env[69328]: DEBUG nova.network.neutron [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Updating instance_info_cache with network_info: [{"id": "61188e10-aa7e-4ec8-99f4-bc6a8380b3be", "address": "fa:16:3e:f9:19:ef", "network": {"id": "2e8bdd1b-0cca-4f7c-bba3-ff1750073020", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2058593014-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "088bc9e3aeb449baa0a522342d57d183", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61188e10-aa", "ovs_interfaceid": "61188e10-aa7e-4ec8-99f4-bc6a8380b3be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1049.773443] env[69328]: DEBUG nova.compute.manager [req-e8a2ebac-dafa-476e-87ce-2b5cf12cb71c req-7d273a0a-05ea-4a6d-84ae-17de70983ba8 service nova] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Received event network-vif-deleted-7db792d2-ce9f-4333-b755-84eb8e83d788 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1049.773721] env[69328]: INFO nova.compute.manager [req-e8a2ebac-dafa-476e-87ce-2b5cf12cb71c req-7d273a0a-05ea-4a6d-84ae-17de70983ba8 service nova] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Neutron deleted interface 7db792d2-ce9f-4333-b755-84eb8e83d788; detaching it from the instance and deleting it from the info cache [ 1049.773868] env[69328]: DEBUG nova.network.neutron [req-e8a2ebac-dafa-476e-87ce-2b5cf12cb71c req-7d273a0a-05ea-4a6d-84ae-17de70983ba8 service nova] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1049.855688] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44703338-17da-437d-813a-033f8c795072 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.869368] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7e53191-638d-4119-b9c1-6468ff130916 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.911849] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61f75bb5-7ba7-4697-835d-8d7ef13fd564 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.922027] 
env[69328]: DEBUG oslo_vmware.api [None req-b179d108-cdfc-4430-8ebc-3ed518c246d8 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273891, 'name': CloneVM_Task} progress is 94%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.927911] env[69328]: DEBUG oslo_vmware.api [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273893, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.929482] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79cf7558-c10e-4eea-a079-928708a60e15 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.949513] env[69328]: DEBUG nova.compute.provider_tree [None req-81ca0695-bba3-49b9-ac61-689ae87a5224 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1049.957680] env[69328]: DEBUG oslo_vmware.api [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273894, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.959320] env[69328]: DEBUG nova.scheduler.client.report [None req-81ca0695-bba3-49b9-ac61-689ae87a5224 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1049.972389] env[69328]: DEBUG oslo_vmware.api [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273895, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.173355} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.973245] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1049.973691] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c3c42b0-cfdd-4c20-b03a-7fa0eed31ea1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.001339] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] 14521ee3-d749-48b4-aeec-23c94ca2cf9f/14521ee3-d749-48b4-aeec-23c94ca2cf9f.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1050.002049] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-781c02e8-dfe8-4d79-97de-f08ac3fca9a9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.025906] env[69328]: DEBUG oslo_vmware.api [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 1050.025906] env[69328]: value = "task-3273896" [ 1050.025906] env[69328]: _type = "Task" [ 1050.025906] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.036275] env[69328]: DEBUG oslo_vmware.api [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273896, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.106991] env[69328]: DEBUG oslo_concurrency.lockutils [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Releasing lock "refresh_cache-071c1837-9d0b-4b69-b16e-991b300385fb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1050.107393] env[69328]: DEBUG nova.compute.manager [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Instance network_info: |[{"id": "61188e10-aa7e-4ec8-99f4-bc6a8380b3be", "address": "fa:16:3e:f9:19:ef", "network": {"id": "2e8bdd1b-0cca-4f7c-bba3-ff1750073020", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2058593014-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "088bc9e3aeb449baa0a522342d57d183", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61188e10-aa", "ovs_interfaceid": "61188e10-aa7e-4ec8-99f4-bc6a8380b3be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1050.107882] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f9:19:ef', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '43ad01d2-c7dd-453c-a929-8ad76294d13c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '61188e10-aa7e-4ec8-99f4-bc6a8380b3be', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1050.116211] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1050.116470] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1050.116698] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-68ed2ab2-2a6f-4c05-8621-fa0f4811c957 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.141520] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1050.141520] env[69328]: value = "task-3273897" [ 1050.141520] env[69328]: _type = "Task" [ 1050.141520] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.152558] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273897, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.206063] env[69328]: DEBUG nova.compute.manager [req-305eebd3-ac42-40c8-b3f9-221b144d7b6e req-c8a04896-b5d6-4d2a-9156-c040558d7b59 service nova] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Received event network-changed-61188e10-aa7e-4ec8-99f4-bc6a8380b3be {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1050.206273] env[69328]: DEBUG nova.compute.manager [req-305eebd3-ac42-40c8-b3f9-221b144d7b6e req-c8a04896-b5d6-4d2a-9156-c040558d7b59 service nova] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Refreshing instance network info cache due to event network-changed-61188e10-aa7e-4ec8-99f4-bc6a8380b3be. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1050.206622] env[69328]: DEBUG oslo_concurrency.lockutils [req-305eebd3-ac42-40c8-b3f9-221b144d7b6e req-c8a04896-b5d6-4d2a-9156-c040558d7b59 service nova] Acquiring lock "refresh_cache-071c1837-9d0b-4b69-b16e-991b300385fb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1050.206773] env[69328]: DEBUG oslo_concurrency.lockutils [req-305eebd3-ac42-40c8-b3f9-221b144d7b6e req-c8a04896-b5d6-4d2a-9156-c040558d7b59 service nova] Acquired lock "refresh_cache-071c1837-9d0b-4b69-b16e-991b300385fb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1050.206934] env[69328]: DEBUG nova.network.neutron [req-305eebd3-ac42-40c8-b3f9-221b144d7b6e req-c8a04896-b5d6-4d2a-9156-c040558d7b59 service nova] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Refreshing network info cache for port 61188e10-aa7e-4ec8-99f4-bc6a8380b3be {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1050.252997] env[69328]: DEBUG nova.network.neutron [-] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1050.278865] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e426916b-d289-41b7-8945-2fb319b0b7f8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.292678] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37c40a5d-5634-4c01-ba4e-da476c12c04c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.336665] env[69328]: DEBUG nova.compute.manager [req-e8a2ebac-dafa-476e-87ce-2b5cf12cb71c req-7d273a0a-05ea-4a6d-84ae-17de70983ba8 service nova] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Detach interface failed, port_id=7db792d2-ce9f-4333-b755-84eb8e83d788, reason: Instance ac0f967d-18c8-45d8-94ca-829a1fe11451 could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1050.420289] env[69328]: DEBUG oslo_vmware.api [None req-b179d108-cdfc-4430-8ebc-3ed518c246d8 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273891, 'name': CloneVM_Task} progress is 95%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.423990] env[69328]: DEBUG oslo_vmware.api [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273893, 'name': ReconfigVM_Task, 'duration_secs': 0.988575} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.424354] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Reconfigured VM instance instance-00000052 to attach disk [datastore1] 65e38a02-880b-46e2-8866-645a9fc17c7a/65e38a02-880b-46e2-8866-645a9fc17c7a.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1050.425046] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3aef022e-785c-4d1d-a0fa-6bca29c1fd3d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.435155] env[69328]: DEBUG oslo_vmware.api [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 1050.435155] env[69328]: value = "task-3273898" [ 1050.435155] env[69328]: _type = "Task" [ 1050.435155] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.449382] env[69328]: DEBUG oslo_vmware.api [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273894, 'name': MoveVirtualDisk_Task} progress is 40%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.453239] env[69328]: DEBUG oslo_vmware.api [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273898, 'name': Rename_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.469324] env[69328]: DEBUG oslo_concurrency.lockutils [None req-81ca0695-bba3-49b9-ac61-689ae87a5224 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.617s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1050.470920] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.746s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1050.473461] env[69328]: INFO nova.compute.claims [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1050.502050] env[69328]: INFO nova.scheduler.client.report [None req-81ca0695-bba3-49b9-ac61-689ae87a5224 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Deleted allocations for instance 3ba646e8-a5c8-4917-a1c4-32b37affb598 [ 1050.539152] env[69328]: DEBUG oslo_vmware.api [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273896, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.617680] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquiring lock "refresh_cache-76210566-12d7-4f6a-afa1-6329e87e0f85" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1050.618079] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquired lock "refresh_cache-76210566-12d7-4f6a-afa1-6329e87e0f85" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1050.618168] env[69328]: DEBUG nova.network.neutron [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1050.658783] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273897, 'name': CreateVM_Task} progress is 25%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.755545] env[69328]: INFO nova.compute.manager [-] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Took 1.36 seconds to deallocate network for instance. 
[ 1050.917077] env[69328]: DEBUG oslo_vmware.api [None req-b179d108-cdfc-4430-8ebc-3ed518c246d8 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273891, 'name': CloneVM_Task, 'duration_secs': 1.836051} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.917408] env[69328]: INFO nova.virt.vmwareapi.vmops [None req-b179d108-cdfc-4430-8ebc-3ed518c246d8 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Created linked-clone VM from snapshot [ 1050.918736] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-187b23c8-6293-4080-b48b-645e74ff0f52 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.933079] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-b179d108-cdfc-4430-8ebc-3ed518c246d8 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Uploading image 6ca2a71d-1fd8-4fbe-874f-4228a2a0dfdc {{(pid=69328) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1050.948141] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-b179d108-cdfc-4430-8ebc-3ed518c246d8 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Destroying the VM {{(pid=69328) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1050.948510] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-c948ae4a-a225-4ffa-a661-8d7ff2bec627 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.959687] env[69328]: DEBUG oslo_vmware.api [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273894, 'name': MoveVirtualDisk_Task} progress is 63%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.960078] env[69328]: DEBUG oslo_vmware.api [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273898, 'name': Rename_Task, 'duration_secs': 0.472287} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.960995] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1050.961304] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-281e86b7-e4eb-40cd-86ac-3eceed61f39c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.968349] env[69328]: DEBUG oslo_vmware.api [None req-b179d108-cdfc-4430-8ebc-3ed518c246d8 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 1050.968349] env[69328]: value = "task-3273899" [ 1050.968349] env[69328]: _type = "Task" [ 1050.968349] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.979284] env[69328]: DEBUG oslo_vmware.api [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 1050.979284] env[69328]: value = "task-3273900" [ 1050.979284] env[69328]: _type = "Task" [ 1050.979284] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.990600] env[69328]: DEBUG oslo_vmware.api [None req-b179d108-cdfc-4430-8ebc-3ed518c246d8 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273899, 'name': Destroy_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.998454] env[69328]: DEBUG oslo_vmware.api [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273900, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.011886] env[69328]: DEBUG oslo_concurrency.lockutils [None req-81ca0695-bba3-49b9-ac61-689ae87a5224 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "3ba646e8-a5c8-4917-a1c4-32b37affb598" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.779s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1051.039374] env[69328]: DEBUG oslo_vmware.api [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273896, 'name': ReconfigVM_Task, 'duration_secs': 0.69311} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.039578] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Reconfigured VM instance instance-00000064 to attach disk [datastore1] 14521ee3-d749-48b4-aeec-23c94ca2cf9f/14521ee3-d749-48b4-aeec-23c94ca2cf9f.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1051.040316] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3b645550-f403-4a42-b82d-bb261fe3425b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.049937] env[69328]: DEBUG oslo_vmware.api [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 1051.049937] env[69328]: value = "task-3273901" [ 1051.049937] env[69328]: _type = "Task" [ 1051.049937] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.061271] env[69328]: DEBUG oslo_vmware.api [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273901, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.157216] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273897, 'name': CreateVM_Task} progress is 25%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.183328] env[69328]: DEBUG nova.network.neutron [req-305eebd3-ac42-40c8-b3f9-221b144d7b6e req-c8a04896-b5d6-4d2a-9156-c040558d7b59 service nova] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Updated VIF entry in instance network info cache for port 61188e10-aa7e-4ec8-99f4-bc6a8380b3be. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1051.183783] env[69328]: DEBUG nova.network.neutron [req-305eebd3-ac42-40c8-b3f9-221b144d7b6e req-c8a04896-b5d6-4d2a-9156-c040558d7b59 service nova] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Updating instance_info_cache with network_info: [{"id": "61188e10-aa7e-4ec8-99f4-bc6a8380b3be", "address": "fa:16:3e:f9:19:ef", "network": {"id": "2e8bdd1b-0cca-4f7c-bba3-ff1750073020", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2058593014-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "088bc9e3aeb449baa0a522342d57d183", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61188e10-aa", "ovs_interfaceid": "61188e10-aa7e-4ec8-99f4-bc6a8380b3be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1051.271690] env[69328]: DEBUG oslo_concurrency.lockutils [None req-94a5dc04-40f3-46a5-8303-2e27cc56aed2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1051.394364] env[69328]: DEBUG nova.network.neutron [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Updating instance_info_cache with network_info: [{"id": "a3cab44b-0572-4007-bab9-e84ba084f70a", "address": "fa:16:3e:02:7d:25", "network": {"id": "3be6b159-5d5c-4752-b79d-0ed6110569d0", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-915151552-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.128", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f50ac50ef6ae4abc83a8064746de7029", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3cab44b-05", "ovs_interfaceid": "a3cab44b-0572-4007-bab9-e84ba084f70a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1051.449058] env[69328]: DEBUG oslo_vmware.api [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273894, 'name': MoveVirtualDisk_Task} progress is 83%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.481972] env[69328]: DEBUG oslo_vmware.api [None req-b179d108-cdfc-4430-8ebc-3ed518c246d8 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273899, 'name': Destroy_Task} progress is 33%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.494975] env[69328]: DEBUG oslo_vmware.api [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273900, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.564091] env[69328]: DEBUG oslo_vmware.api [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273901, 'name': Rename_Task, 'duration_secs': 0.214058} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.564318] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1051.564579] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-61c15b17-2cfa-4669-8892-0560e4687d77 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.573601] env[69328]: DEBUG oslo_vmware.api [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 1051.573601] env[69328]: value = "task-3273902" [ 1051.573601] env[69328]: _type = "Task" [ 1051.573601] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.586182] env[69328]: DEBUG oslo_vmware.api [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273902, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.638896] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5959cfbe-e2bf-48b9-bbc3-e7597d70bc24 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "interface-dc050589-e37a-4798-9532-df4ecfab7eb1-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1051.639534] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5959cfbe-e2bf-48b9-bbc3-e7597d70bc24 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "interface-dc050589-e37a-4798-9532-df4ecfab7eb1-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1051.640194] env[69328]: DEBUG nova.objects.instance [None req-5959cfbe-e2bf-48b9-bbc3-e7597d70bc24 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lazy-loading 'flavor' on Instance uuid dc050589-e37a-4798-9532-df4ecfab7eb1 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1051.658029] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273897, 'name': CreateVM_Task} progress is 25%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.688187] env[69328]: DEBUG oslo_concurrency.lockutils [req-305eebd3-ac42-40c8-b3f9-221b144d7b6e req-c8a04896-b5d6-4d2a-9156-c040558d7b59 service nova] Releasing lock "refresh_cache-071c1837-9d0b-4b69-b16e-991b300385fb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1051.889401] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51e0e6dd-dcf3-4d19-85fa-b42a01dad1cd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.900266] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Releasing lock "refresh_cache-76210566-12d7-4f6a-afa1-6329e87e0f85" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1051.903328] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-629dfb16-2132-4f53-bb61-cb66328b9010 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.939695] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-396beaf4-8373-4f5e-8924-217efc9670c4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.950659] env[69328]: DEBUG nova.virt.hardware [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='679d116a382dd89dd08572bf98e767da',container_format='bare',created_at=2025-04-03T17:42:17Z,direct_url=,disk_format='vmdk',id=83b1e553-81a0-4dcf-a9f7-df6e5e0289ab,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1493994984-shelved',owner='f50ac50ef6ae4abc83a8064746de7029',properties=ImageMetaProps,protected=,size=31668224,status='active',tags=,updated_at=2025-04-03T17:42:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1051.950953] env[69328]: DEBUG nova.virt.hardware [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1051.951134] env[69328]: DEBUG nova.virt.hardware [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1051.951342] env[69328]: DEBUG nova.virt.hardware [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1051.951493] env[69328]: DEBUG nova.virt.hardware [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1051.951664] env[69328]: DEBUG nova.virt.hardware [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1051.951886] env[69328]: DEBUG nova.virt.hardware [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1051.952072] env[69328]: DEBUG nova.virt.hardware [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1051.952243] env[69328]: DEBUG nova.virt.hardware [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:505}} [ 1051.952455] env[69328]: DEBUG nova.virt.hardware [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1051.952694] env[69328]: DEBUG nova.virt.hardware [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1051.953555] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4a03b0c-2c2b-4db5-9c0f-2eea84eb02ec {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.961640] env[69328]: DEBUG oslo_vmware.api [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273894, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.488232} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.963494] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-372d577a-39c2-4256-9526-fbaa5cb5b81e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.968924] env[69328]: INFO nova.virt.vmwareapi.ds_util [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_93ca0648-addc-4dd4-9ca8-ca5b832efff4/OSTACK_IMG_93ca0648-addc-4dd4-9ca8-ca5b832efff4.vmdk to [datastore1] devstack-image-cache_base/8868d8b6-e8a6-4c40-9bca-fb6ec2c24443/8868d8b6-e8a6-4c40-9bca-fb6ec2c24443.vmdk. 
[ 1051.968924] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Cleaning up location [datastore1] OSTACK_IMG_93ca0648-addc-4dd4-9ca8-ca5b832efff4 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1051.968924] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_93ca0648-addc-4dd4-9ca8-ca5b832efff4 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1051.971301] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-97ff0f9d-e0da-45e7-b1af-03f825641c5f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.978362] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3810c97-8e16-4c73-895a-6cbe01acbeff {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.991977] env[69328]: DEBUG nova.compute.provider_tree [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1052.002141] env[69328]: DEBUG oslo_vmware.api [None req-b179d108-cdfc-4430-8ebc-3ed518c246d8 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273899, 'name': Destroy_Task, 'duration_secs': 0.568122} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.002457] env[69328]: DEBUG oslo_vmware.api [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for the task: (returnval){ [ 1052.002457] env[69328]: value = "task-3273903" [ 1052.002457] env[69328]: _type = "Task" [ 1052.002457] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.012739] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-b179d108-cdfc-4430-8ebc-3ed518c246d8 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Destroyed the VM [ 1052.013217] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b179d108-cdfc-4430-8ebc-3ed518c246d8 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Deleting Snapshot of the VM instance {{(pid=69328) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1052.013775] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:02:7d:25', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fe20ef0e-0991-44d7-887d-08dddac0b56b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a3cab44b-0572-4007-bab9-e84ba084f70a', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1052.022407] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1052.026769] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c0034ebd-8193-4911-9eb1-fe3fdafa9790 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.033563] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1052.033961] env[69328]: DEBUG oslo_vmware.api [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273900, 'name': PowerOnVM_Task, 'duration_secs': 1.043389} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.034648] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e1337bfd-0047-4253-ba98-b0b3a4c84028 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.050693] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1052.050693] env[69328]: DEBUG nova.compute.manager [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1052.052543] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cc70649-48da-4637-8c20-ba84e613d243 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.059601] env[69328]: DEBUG oslo_vmware.api [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273903, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.044087} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.059878] env[69328]: DEBUG oslo_vmware.api [None req-b179d108-cdfc-4430-8ebc-3ed518c246d8 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 1052.059878] env[69328]: value = "task-3273904" [ 1052.059878] env[69328]: _type = "Task" [ 1052.059878] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.060416] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1052.060560] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Releasing lock "[datastore1] devstack-image-cache_base/8868d8b6-e8a6-4c40-9bca-fb6ec2c24443/8868d8b6-e8a6-4c40-9bca-fb6ec2c24443.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1052.060809] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/8868d8b6-e8a6-4c40-9bca-fb6ec2c24443/8868d8b6-e8a6-4c40-9bca-fb6ec2c24443.vmdk to [datastore1] a0952fdf-5570-4112-bc4d-e9f9cee1599c/a0952fdf-5570-4112-bc4d-e9f9cee1599c.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1052.061106] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e6f8d3a4-0e8f-42a3-b4db-32a8b7a621ba {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.067686] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1052.067686] env[69328]: value = "task-3273905" [ 1052.067686] env[69328]: _type = "Task" [ 1052.067686] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.083222] env[69328]: DEBUG oslo_vmware.api [None req-b179d108-cdfc-4430-8ebc-3ed518c246d8 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273904, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.083222] env[69328]: DEBUG oslo_vmware.api [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for the task: (returnval){ [ 1052.083222] env[69328]: value = "task-3273906" [ 1052.083222] env[69328]: _type = "Task" [ 1052.083222] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.091676] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273905, 'name': CreateVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.096898] env[69328]: DEBUG oslo_vmware.api [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273902, 'name': PowerOnVM_Task} progress is 91%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.099506] env[69328]: DEBUG oslo_vmware.api [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273906, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.147794] env[69328]: DEBUG nova.objects.instance [None req-5959cfbe-e2bf-48b9-bbc3-e7597d70bc24 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lazy-loading 'pci_requests' on Instance uuid dc050589-e37a-4798-9532-df4ecfab7eb1 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1052.158683] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273897, 'name': CreateVM_Task, 'duration_secs': 1.853963} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.159423] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1052.160163] env[69328]: DEBUG oslo_concurrency.lockutils [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1052.160786] env[69328]: DEBUG oslo_concurrency.lockutils [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1052.160786] env[69328]: DEBUG oslo_concurrency.lockutils [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1052.161238] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db66e4d1-077f-4874-bcef-22c5e62506f3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.167111] env[69328]: DEBUG oslo_vmware.api [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 1052.167111] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52fa590f-85fd-5f09-722e-1f324fc9b835" [ 1052.167111] env[69328]: _type = "Task" [ 1052.167111] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.176565] env[69328]: DEBUG oslo_vmware.api [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52fa590f-85fd-5f09-722e-1f324fc9b835, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.503871] env[69328]: DEBUG nova.scheduler.client.report [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1052.577317] env[69328]: DEBUG oslo_vmware.api [None req-b179d108-cdfc-4430-8ebc-3ed518c246d8 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273904, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.586569] env[69328]: DEBUG oslo_concurrency.lockutils [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1052.601676] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273905, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.610143] env[69328]: DEBUG oslo_vmware.api [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273906, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.610518] env[69328]: DEBUG oslo_vmware.api [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273902, 'name': PowerOnVM_Task, 'duration_secs': 0.621611} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.610786] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1052.611098] env[69328]: INFO nova.compute.manager [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Took 8.06 seconds to spawn the instance on the hypervisor. [ 1052.611312] env[69328]: DEBUG nova.compute.manager [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1052.612152] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab512c9b-1d6f-4f72-89db-385204b86af5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.653999] env[69328]: DEBUG nova.objects.base [None req-5959cfbe-e2bf-48b9-bbc3-e7597d70bc24 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=69328) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1052.654243] env[69328]: DEBUG nova.network.neutron [None req-5959cfbe-e2bf-48b9-bbc3-e7597d70bc24 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1052.680311] env[69328]: DEBUG oslo_vmware.api [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52fa590f-85fd-5f09-722e-1f324fc9b835, 'name': SearchDatastore_Task, 'duration_secs': 0.011485} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.680311] env[69328]: DEBUG oslo_concurrency.lockutils [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1052.680311] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1052.680311] env[69328]: DEBUG oslo_concurrency.lockutils [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1052.680559] env[69328]: DEBUG oslo_concurrency.lockutils [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1052.680603] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1052.680882] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b47ba249-938c-42b4-8bdf-4056f7a2a5d4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.699127] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1052.699389] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1052.700479] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce8a2044-2696-45ef-9771-bfbaa6360319 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.706588] env[69328]: DEBUG oslo_vmware.api [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 1052.706588] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]527a2f59-a658-f0de-8f43-737b3f4f734f" [ 1052.706588] env[69328]: _type = "Task" [ 1052.706588] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.719743] env[69328]: DEBUG oslo_vmware.api [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]527a2f59-a658-f0de-8f43-737b3f4f734f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.753709] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5959cfbe-e2bf-48b9-bbc3-e7597d70bc24 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "interface-dc050589-e37a-4798-9532-df4ecfab7eb1-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.114s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1052.880559] env[69328]: DEBUG nova.compute.manager [req-673e6277-5831-4a1b-9da6-75812ba2d296 req-dec5371d-6b95-46af-aac9-c319bf0ddbf8 service nova] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Received event network-vif-plugged-a3cab44b-0572-4007-bab9-e84ba084f70a {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1052.880827] env[69328]: DEBUG oslo_concurrency.lockutils [req-673e6277-5831-4a1b-9da6-75812ba2d296 req-dec5371d-6b95-46af-aac9-c319bf0ddbf8 service nova] Acquiring lock "76210566-12d7-4f6a-afa1-6329e87e0f85-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1052.881048] env[69328]: DEBUG oslo_concurrency.lockutils [req-673e6277-5831-4a1b-9da6-75812ba2d296 req-dec5371d-6b95-46af-aac9-c319bf0ddbf8 service nova] Lock "76210566-12d7-4f6a-afa1-6329e87e0f85-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1052.881220] env[69328]: DEBUG oslo_concurrency.lockutils [req-673e6277-5831-4a1b-9da6-75812ba2d296 req-dec5371d-6b95-46af-aac9-c319bf0ddbf8 service nova] Lock "76210566-12d7-4f6a-afa1-6329e87e0f85-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1052.881445] env[69328]: DEBUG nova.compute.manager [req-673e6277-5831-4a1b-9da6-75812ba2d296 req-dec5371d-6b95-46af-aac9-c319bf0ddbf8 service 
nova] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] No waiting events found dispatching network-vif-plugged-a3cab44b-0572-4007-bab9-e84ba084f70a {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1052.881546] env[69328]: WARNING nova.compute.manager [req-673e6277-5831-4a1b-9da6-75812ba2d296 req-dec5371d-6b95-46af-aac9-c319bf0ddbf8 service nova] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Received unexpected event network-vif-plugged-a3cab44b-0572-4007-bab9-e84ba084f70a for instance with vm_state shelved_offloaded and task_state spawning. [ 1052.881691] env[69328]: DEBUG nova.compute.manager [req-673e6277-5831-4a1b-9da6-75812ba2d296 req-dec5371d-6b95-46af-aac9-c319bf0ddbf8 service nova] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Received event network-changed-a3cab44b-0572-4007-bab9-e84ba084f70a {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1052.881847] env[69328]: DEBUG nova.compute.manager [req-673e6277-5831-4a1b-9da6-75812ba2d296 req-dec5371d-6b95-46af-aac9-c319bf0ddbf8 service nova] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Refreshing instance network info cache due to event network-changed-a3cab44b-0572-4007-bab9-e84ba084f70a. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1052.882042] env[69328]: DEBUG oslo_concurrency.lockutils [req-673e6277-5831-4a1b-9da6-75812ba2d296 req-dec5371d-6b95-46af-aac9-c319bf0ddbf8 service nova] Acquiring lock "refresh_cache-76210566-12d7-4f6a-afa1-6329e87e0f85" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1052.882298] env[69328]: DEBUG oslo_concurrency.lockutils [req-673e6277-5831-4a1b-9da6-75812ba2d296 req-dec5371d-6b95-46af-aac9-c319bf0ddbf8 service nova] Acquired lock "refresh_cache-76210566-12d7-4f6a-afa1-6329e87e0f85" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1052.882382] env[69328]: DEBUG nova.network.neutron [req-673e6277-5831-4a1b-9da6-75812ba2d296 req-dec5371d-6b95-46af-aac9-c319bf0ddbf8 service nova] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Refreshing network info cache for port a3cab44b-0572-4007-bab9-e84ba084f70a {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1053.010609] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.540s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1053.011308] env[69328]: DEBUG nova.compute.manager [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1053.014540] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c1b9677b-6ae4-4ff8-b72c-b37efc8cb7ac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.139s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1053.014814] env[69328]: DEBUG nova.objects.instance [None req-c1b9677b-6ae4-4ff8-b72c-b37efc8cb7ac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Lazy-loading 'resources' on Instance uuid 55d9ba65-e5c8-446a-a209-a840f30ff02c {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1053.074588] env[69328]: DEBUG oslo_vmware.api [None req-b179d108-cdfc-4430-8ebc-3ed518c246d8 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273904, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.085023] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273905, 'name': CreateVM_Task, 'duration_secs': 0.57925} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.085661] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1053.086041] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/83b1e553-81a0-4dcf-a9f7-df6e5e0289ab" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1053.086212] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquired lock "[datastore2] devstack-image-cache_base/83b1e553-81a0-4dcf-a9f7-df6e5e0289ab" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1053.086683] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/83b1e553-81a0-4dcf-a9f7-df6e5e0289ab" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1053.087022] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47ea0745-1e75-4879-bb7c-b5692fdde3e3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.097419] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 1053.097419] env[69328]: value = 
"session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ed194e-9d94-2bfe-4682-78c87490aac8" [ 1053.097419] env[69328]: _type = "Task" [ 1053.097419] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.106752] env[69328]: DEBUG oslo_vmware.api [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273906, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.112931] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ed194e-9d94-2bfe-4682-78c87490aac8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.133022] env[69328]: INFO nova.compute.manager [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Took 31.92 seconds to build instance. [ 1053.218257] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "c1829dcf-3608-4955-bd50-eb9ee27d38e1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1053.218652] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "c1829dcf-3608-4955-bd50-eb9ee27d38e1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1053.226504] env[69328]: DEBUG oslo_vmware.api [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]527a2f59-a658-f0de-8f43-737b3f4f734f, 'name': SearchDatastore_Task, 'duration_secs': 0.097348} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.227634] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25df94cc-fe4e-48e8-9e69-99894423e129 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.233663] env[69328]: DEBUG oslo_vmware.api [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 1053.233663] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]523ca107-7fad-6931-9797-06b799a1fcd8" [ 1053.233663] env[69328]: _type = "Task" [ 1053.233663] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.244493] env[69328]: DEBUG oslo_vmware.api [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]523ca107-7fad-6931-9797-06b799a1fcd8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.518655] env[69328]: DEBUG nova.compute.utils [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1053.523055] env[69328]: DEBUG nova.compute.manager [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1053.523281] env[69328]: DEBUG nova.network.neutron [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1053.576342] env[69328]: DEBUG oslo_vmware.api [None req-b179d108-cdfc-4430-8ebc-3ed518c246d8 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273904, 'name': RemoveSnapshot_Task, 'duration_secs': 1.238731} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.580693] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b179d108-cdfc-4430-8ebc-3ed518c246d8 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Deleted Snapshot of the VM instance {{(pid=69328) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1053.584647] env[69328]: DEBUG nova.policy [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c1d18e6b9e284403a091afd2c3e31c1c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f357b5a9494b4849a83aa934c5d4e26b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 1053.599577] env[69328]: DEBUG oslo_vmware.api [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273906, 'name': CopyVirtualDisk_Task} progress is 63%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.612156] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Releasing lock "[datastore2] devstack-image-cache_base/83b1e553-81a0-4dcf-a9f7-df6e5e0289ab" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1053.612885] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Processing image 83b1e553-81a0-4dcf-a9f7-df6e5e0289ab {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1053.612885] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/83b1e553-81a0-4dcf-a9f7-df6e5e0289ab/83b1e553-81a0-4dcf-a9f7-df6e5e0289ab.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1053.612885] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquired lock "[datastore2] devstack-image-cache_base/83b1e553-81a0-4dcf-a9f7-df6e5e0289ab/83b1e553-81a0-4dcf-a9f7-df6e5e0289ab.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1053.613080] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1053.614034] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b29100ca-e7a9-48da-80fc-7aba4488f0f5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.623653] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1053.624383] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1053.625551] env[69328]: DEBUG nova.network.neutron [req-673e6277-5831-4a1b-9da6-75812ba2d296 req-dec5371d-6b95-46af-aac9-c319bf0ddbf8 service nova] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Updated VIF entry in instance network info cache for port a3cab44b-0572-4007-bab9-e84ba084f70a. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1053.625986] env[69328]: DEBUG nova.network.neutron [req-673e6277-5831-4a1b-9da6-75812ba2d296 req-dec5371d-6b95-46af-aac9-c319bf0ddbf8 service nova] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Updating instance_info_cache with network_info: [{"id": "a3cab44b-0572-4007-bab9-e84ba084f70a", "address": "fa:16:3e:02:7d:25", "network": {"id": "3be6b159-5d5c-4752-b79d-0ed6110569d0", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-915151552-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.128", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f50ac50ef6ae4abc83a8064746de7029", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3cab44b-05", "ovs_interfaceid": "a3cab44b-0572-4007-bab9-e84ba084f70a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1053.627243] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3fab17f3-0c2a-4379-ba48-37ceee7c1162 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.640528] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b96f43b3-9d1d-4ce5-841b-8c0941398203 tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "14521ee3-d749-48b4-aeec-23c94ca2cf9f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.431s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1053.643995] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 1053.643995] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a3f53b-51c8-a4b5-1084-6103af1c8f02" [ 1053.643995] env[69328]: _type = "Task" [ 1053.643995] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.653913] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a3f53b-51c8-a4b5-1084-6103af1c8f02, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.728231] env[69328]: DEBUG nova.compute.manager [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1053.746976] env[69328]: DEBUG oslo_vmware.api [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]523ca107-7fad-6931-9797-06b799a1fcd8, 'name': SearchDatastore_Task, 'duration_secs': 0.098424} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.749643] env[69328]: DEBUG oslo_concurrency.lockutils [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1053.749952] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 071c1837-9d0b-4b69-b16e-991b300385fb/071c1837-9d0b-4b69-b16e-991b300385fb.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1053.750527] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cf495b5b-37db-4920-a38b-15f74de3e16e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.757899] env[69328]: DEBUG oslo_vmware.api [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 1053.757899] env[69328]: value = "task-3273907" [ 1053.757899] env[69328]: _type = "Task" [ 1053.757899] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.772792] env[69328]: DEBUG oslo_vmware.api [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273907, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.959837] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e23bffe9-dad4-4f36-a745-3dc8c3d6105c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.969675] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccdd1651-d93b-45e0-864c-8d18b0629a58 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.004069] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9632017a-af77-4962-91f4-a6f7c36202e7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.011833] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-197b87c7-9a85-46a0-b8d4-422d7a072553 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.026206] env[69328]: DEBUG nova.compute.manager [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1054.029184] env[69328]: DEBUG nova.compute.provider_tree [None req-c1b9677b-6ae4-4ff8-b72c-b37efc8cb7ac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1054.090300] env[69328]: WARNING nova.compute.manager [None req-b179d108-cdfc-4430-8ebc-3ed518c246d8 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Image not found during snapshot: nova.exception.ImageNotFound: Image 6ca2a71d-1fd8-4fbe-874f-4228a2a0dfdc could not be found. [ 1054.102056] env[69328]: DEBUG oslo_vmware.api [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273906, 'name': CopyVirtualDisk_Task} progress is 85%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.133626] env[69328]: DEBUG oslo_concurrency.lockutils [req-673e6277-5831-4a1b-9da6-75812ba2d296 req-dec5371d-6b95-46af-aac9-c319bf0ddbf8 service nova] Releasing lock "refresh_cache-76210566-12d7-4f6a-afa1-6329e87e0f85" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1054.157092] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Preparing fetch location {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1054.157355] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Fetch image to [datastore2] OSTACK_IMG_abfabe8c-b9ab-46bf-924a-f5186665a333/OSTACK_IMG_abfabe8c-b9ab-46bf-924a-f5186665a333.vmdk {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1054.157545] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Downloading stream optimized image 83b1e553-81a0-4dcf-a9f7-df6e5e0289ab to [datastore2] OSTACK_IMG_abfabe8c-b9ab-46bf-924a-f5186665a333/OSTACK_IMG_abfabe8c-b9ab-46bf-924a-f5186665a333.vmdk on the data store datastore2 as vApp {{(pid=69328) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1054.157991] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Downloading image file data 83b1e553-81a0-4dcf-a9f7-df6e5e0289ab to the ESX as VM named 'OSTACK_IMG_abfabe8c-b9ab-46bf-924a-f5186665a333' {{(pid=69328) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1054.256747] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1054.259541] env[69328]: DEBUG oslo_vmware.rw_handles [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1054.259541] env[69328]: value = "resgroup-9" [ 1054.259541] env[69328]: _type = "ResourcePool" [ 1054.259541] env[69328]: }. 
{{(pid=69328) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1054.259922] env[69328]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-927a5bc0-412c-40f0-823f-414ba91f6edd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.286673] env[69328]: DEBUG oslo_vmware.rw_handles [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lease: (returnval){ [ 1054.286673] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]520dfb07-876e-0eb3-110a-86518dd3bd66" [ 1054.286673] env[69328]: _type = "HttpNfcLease" [ 1054.286673] env[69328]: } obtained for vApp import into resource pool (val){ [ 1054.286673] env[69328]: value = "resgroup-9" [ 1054.286673] env[69328]: _type = "ResourcePool" [ 1054.286673] env[69328]: }. {{(pid=69328) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1054.286927] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the lease: (returnval){ [ 1054.286927] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]520dfb07-876e-0eb3-110a-86518dd3bd66" [ 1054.286927] env[69328]: _type = "HttpNfcLease" [ 1054.286927] env[69328]: } to be ready. {{(pid=69328) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1054.290547] env[69328]: DEBUG oslo_vmware.api [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273907, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.299816] env[69328]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1054.299816] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]520dfb07-876e-0eb3-110a-86518dd3bd66" [ 1054.299816] env[69328]: _type = "HttpNfcLease" [ 1054.299816] env[69328]: } is initializing. 
{{(pid=69328) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1054.322042] env[69328]: DEBUG nova.network.neutron [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Successfully created port: d779425b-180c-47fd-b307-e02e14f18a26 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1054.536051] env[69328]: DEBUG nova.scheduler.client.report [None req-c1b9677b-6ae4-4ff8-b72c-b37efc8cb7ac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1054.610592] env[69328]: DEBUG oslo_vmware.api [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273906, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.395093} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.610928] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/8868d8b6-e8a6-4c40-9bca-fb6ec2c24443/8868d8b6-e8a6-4c40-9bca-fb6ec2c24443.vmdk to [datastore1] a0952fdf-5570-4112-bc4d-e9f9cee1599c/a0952fdf-5570-4112-bc4d-e9f9cee1599c.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1054.612062] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80332c1b-6d8e-48b7-abab-292c6bc2d540 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.648340] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] a0952fdf-5570-4112-bc4d-e9f9cee1599c/a0952fdf-5570-4112-bc4d-e9f9cee1599c.vmdk or device None with type streamOptimized {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1054.648722] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-066f5b11-98e7-4493-a895-16625996143b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.672955] env[69328]: DEBUG oslo_vmware.api [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for the task: (returnval){ [ 1054.672955] env[69328]: value = "task-3273909" [ 
1054.672955] env[69328]: _type = "Task" [ 1054.672955] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.685988] env[69328]: DEBUG oslo_vmware.api [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273909, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.772376] env[69328]: DEBUG oslo_vmware.api [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273907, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.799233] env[69328]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1054.799233] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]520dfb07-876e-0eb3-110a-86518dd3bd66" [ 1054.799233] env[69328]: _type = "HttpNfcLease" [ 1054.799233] env[69328]: } is initializing. {{(pid=69328) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1054.938377] env[69328]: INFO nova.compute.manager [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Rebuilding instance [ 1054.986397] env[69328]: DEBUG nova.compute.manager [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1054.987385] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7573aaa8-11c3-4502-86f3-8e16a88e8331 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.040694] env[69328]: DEBUG nova.compute.manager [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1055.043390] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c1b9677b-6ae4-4ff8-b72c-b37efc8cb7ac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.029s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1055.045912] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 15.790s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1055.049103] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b59ab517-cfd7-4d65-808b-6c75bd5d9991 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "interface-dc050589-e37a-4798-9532-df4ecfab7eb1-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1055.049353] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b59ab517-cfd7-4d65-808b-6c75bd5d9991 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "interface-dc050589-e37a-4798-9532-df4ecfab7eb1-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1055.049655] env[69328]: DEBUG nova.objects.instance [None req-b59ab517-cfd7-4d65-808b-6c75bd5d9991 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lazy-loading 'flavor' on Instance uuid dc050589-e37a-4798-9532-df4ecfab7eb1 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1055.064060] env[69328]: INFO nova.scheduler.client.report [None req-c1b9677b-6ae4-4ff8-b72c-b37efc8cb7ac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Deleted allocations for instance 55d9ba65-e5c8-446a-a209-a840f30ff02c [ 1055.069304] env[69328]: DEBUG nova.virt.hardware [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1055.069304] env[69328]: DEBUG nova.virt.hardware [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1055.069668] env[69328]: DEBUG nova.virt.hardware [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1055.069668] env[69328]: DEBUG nova.virt.hardware [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1055.069810] env[69328]: DEBUG nova.virt.hardware [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1055.070639] env[69328]: DEBUG nova.virt.hardware [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1055.071281] env[69328]: DEBUG nova.virt.hardware [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1055.071281] env[69328]: DEBUG nova.virt.hardware [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1055.071493] env[69328]: DEBUG nova.virt.hardware [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1055.071709] env[69328]: DEBUG nova.virt.hardware [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1055.071936] env[69328]: DEBUG nova.virt.hardware [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1055.075595] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-27459f98-2709-4517-ad71-99cf3cfdb130 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.085772] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92147307-9795-42f3-b738-6a1d4fb2bd53 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.183978] env[69328]: DEBUG oslo_vmware.api [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273909, 'name': ReconfigVM_Task, 'duration_secs': 0.500762} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.183978] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Reconfigured VM instance instance-00000045 to attach disk [datastore1] a0952fdf-5570-4112-bc4d-e9f9cee1599c/a0952fdf-5570-4112-bc4d-e9f9cee1599c.vmdk or device None with type streamOptimized {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1055.184411] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-adf2fffe-d803-47e5-b215-af50025b8ad7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.192058] env[69328]: DEBUG oslo_vmware.api [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for the task: (returnval){ [ 1055.192058] env[69328]: value = "task-3273910" [ 1055.192058] env[69328]: _type = "Task" [ 1055.192058] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.199680] env[69328]: DEBUG oslo_vmware.api [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273910, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.270913] env[69328]: DEBUG oslo_vmware.api [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273907, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.105888} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.270913] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 071c1837-9d0b-4b69-b16e-991b300385fb/071c1837-9d0b-4b69-b16e-991b300385fb.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1055.270913] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1055.271189] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-612a539f-6dc4-487b-a149-e978f61890d4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.277843] env[69328]: DEBUG oslo_vmware.api [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 1055.277843] env[69328]: value = "task-3273911" [ 1055.277843] env[69328]: _type = "Task" [ 1055.277843] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.285767] env[69328]: DEBUG oslo_vmware.api [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273911, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.297735] env[69328]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1055.297735] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]520dfb07-876e-0eb3-110a-86518dd3bd66" [ 1055.297735] env[69328]: _type = "HttpNfcLease" [ 1055.297735] env[69328]: } is ready. {{(pid=69328) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1055.298046] env[69328]: DEBUG oslo_vmware.rw_handles [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1055.298046] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]520dfb07-876e-0eb3-110a-86518dd3bd66" [ 1055.298046] env[69328]: _type = "HttpNfcLease" [ 1055.298046] env[69328]: }. 
{{(pid=69328) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1055.298776] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e907077-3bb3-4403-a220-c07ec91b5cdb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.305970] env[69328]: DEBUG oslo_vmware.rw_handles [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52224333-c781-3245-ac63-f9f4f9c617e4/disk-0.vmdk from lease info. {{(pid=69328) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1055.306188] env[69328]: DEBUG oslo_vmware.rw_handles [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Creating HTTP connection to write to file with size = 31668224 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52224333-c781-3245-ac63-f9f4f9c617e4/disk-0.vmdk. {{(pid=69328) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1055.372697] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-04be1df6-85c6-4979-8cba-67821eca5317 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.551252] env[69328]: INFO nova.compute.claims [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1055.582215] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c1b9677b-6ae4-4ff8-b72c-b37efc8cb7ac tempest-ServersTestManualDisk-2078956123 tempest-ServersTestManualDisk-2078956123-project-member] Lock "55d9ba65-e5c8-446a-a209-a840f30ff02c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.011s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1055.621538] env[69328]: DEBUG nova.objects.instance [None req-b59ab517-cfd7-4d65-808b-6c75bd5d9991 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lazy-loading 'pci_requests' on Instance uuid dc050589-e37a-4798-9532-df4ecfab7eb1 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1055.702804] env[69328]: DEBUG oslo_vmware.api [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273910, 'name': Rename_Task, 'duration_secs': 0.493132} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.703110] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1055.703375] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e120af72-456d-451b-a9af-c53884ed67fe {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.709750] env[69328]: DEBUG oslo_vmware.api [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for the task: (returnval){ [ 1055.709750] env[69328]: value = "task-3273912" [ 1055.709750] env[69328]: _type = "Task" [ 1055.709750] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.718608] env[69328]: DEBUG oslo_vmware.api [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273912, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.788209] env[69328]: DEBUG oslo_vmware.api [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273911, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067436} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.788495] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1055.789295] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e631ed2-6032-4c13-b4ec-fc5f676eaef9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.810796] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] 071c1837-9d0b-4b69-b16e-991b300385fb/071c1837-9d0b-4b69-b16e-991b300385fb.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1055.811337] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-20efd073-6de3-48d2-9405-ea378385c15a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.830426] env[69328]: DEBUG oslo_vmware.api [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 1055.830426] env[69328]: value = "task-3273913" [ 1055.830426] env[69328]: _type = "Task" [ 1055.830426] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.838378] env[69328]: DEBUG oslo_vmware.api [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273913, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.006516] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1056.006845] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5ba14e30-2047-4ef6-9659-9df91f1c8601 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.015164] env[69328]: DEBUG oslo_vmware.api [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 1056.015164] env[69328]: value = "task-3273914" [ 1056.015164] env[69328]: _type = "Task" [ 1056.015164] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.024010] env[69328]: DEBUG oslo_vmware.api [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273914, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.047124] env[69328]: DEBUG nova.network.neutron [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Successfully updated port: d779425b-180c-47fd-b307-e02e14f18a26 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1056.060274] env[69328]: INFO nova.compute.resource_tracker [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Updating resource usage from migration 65843494-d4bc-40c8-866b-e1e3d3443745 [ 1056.126016] env[69328]: DEBUG nova.objects.base [None req-b59ab517-cfd7-4d65-808b-6c75bd5d9991 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=69328) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1056.126110] env[69328]: DEBUG nova.network.neutron [None req-b59ab517-cfd7-4d65-808b-6c75bd5d9991 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1056.170746] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c83f3c34-5732-4221-98a5-17e4bc0a9442 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "6ad357d9-c35a-4fdb-8dd0-39a0617bf85e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1056.171016] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c83f3c34-5732-4221-98a5-17e4bc0a9442 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "6ad357d9-c35a-4fdb-8dd0-39a0617bf85e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1056.171226] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c83f3c34-5732-4221-98a5-17e4bc0a9442 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "6ad357d9-c35a-4fdb-8dd0-39a0617bf85e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1056.171476] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c83f3c34-5732-4221-98a5-17e4bc0a9442 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "6ad357d9-c35a-4fdb-8dd0-39a0617bf85e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1056.171672] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c83f3c34-5732-4221-98a5-17e4bc0a9442 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "6ad357d9-c35a-4fdb-8dd0-39a0617bf85e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1056.176057] env[69328]: INFO nova.compute.manager [None req-c83f3c34-5732-4221-98a5-17e4bc0a9442 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Terminating instance [ 1056.192275] env[69328]: DEBUG nova.policy [None req-b59ab517-cfd7-4d65-808b-6c75bd5d9991 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '69ca01fd1d0f42b0b05a5426da9753ad', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '30209bc93a4042488f15c73b7e4733d5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 1056.226147] env[69328]: DEBUG oslo_vmware.api [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273912, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.341218] env[69328]: DEBUG oslo_vmware.api [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273913, 'name': ReconfigVM_Task, 'duration_secs': 0.300351} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.344729] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Reconfigured VM instance instance-00000065 to attach disk [datastore1] 071c1837-9d0b-4b69-b16e-991b300385fb/071c1837-9d0b-4b69-b16e-991b300385fb.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1056.346703] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0f683bf6-4bd0-45be-a4dd-430c006efde6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.349559] env[69328]: DEBUG nova.compute.manager [req-5ce8b88c-2fdc-40aa-8171-86b79b328f10 req-9b9b0f05-40ff-45ee-9a4a-0d15f88a4b5e service nova] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Received event network-vif-plugged-d779425b-180c-47fd-b307-e02e14f18a26 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1056.349773] env[69328]: DEBUG oslo_concurrency.lockutils [req-5ce8b88c-2fdc-40aa-8171-86b79b328f10 req-9b9b0f05-40ff-45ee-9a4a-0d15f88a4b5e service nova] Acquiring lock "dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1056.349981] env[69328]: DEBUG oslo_concurrency.lockutils [req-5ce8b88c-2fdc-40aa-8171-86b79b328f10 req-9b9b0f05-40ff-45ee-9a4a-0d15f88a4b5e service nova] Lock "dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1056.350166] env[69328]: DEBUG oslo_concurrency.lockutils [req-5ce8b88c-2fdc-40aa-8171-86b79b328f10 req-9b9b0f05-40ff-45ee-9a4a-0d15f88a4b5e service nova] Lock "dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1056.350328] env[69328]: DEBUG nova.compute.manager [req-5ce8b88c-2fdc-40aa-8171-86b79b328f10 req-9b9b0f05-40ff-45ee-9a4a-0d15f88a4b5e service nova] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] No waiting events found dispatching network-vif-plugged-d779425b-180c-47fd-b307-e02e14f18a26 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1056.350498] env[69328]: WARNING nova.compute.manager [req-5ce8b88c-2fdc-40aa-8171-86b79b328f10 req-9b9b0f05-40ff-45ee-9a4a-0d15f88a4b5e service nova] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Received unexpected event network-vif-plugged-d779425b-180c-47fd-b307-e02e14f18a26 for instance with vm_state building and task_state spawning. 
[ 1056.359574] env[69328]: DEBUG oslo_vmware.api [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 1056.359574] env[69328]: value = "task-3273915" [ 1056.359574] env[69328]: _type = "Task" [ 1056.359574] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.371771] env[69328]: DEBUG oslo_vmware.api [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273915, 'name': Rename_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.509378] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b12d0d8-adac-4004-be2f-8c4048bd8cac {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.521695] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28e3f59f-3481-488b-92fd-4f489a678ea9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.535575] env[69328]: DEBUG oslo_vmware.api [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273914, 'name': PowerOffVM_Task, 'duration_secs': 0.213244} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.562916] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1056.563884] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1056.564197] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "refresh_cache-dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1056.564335] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquired lock "refresh_cache-dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1056.564480] env[69328]: DEBUG nova.network.neutron [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Building network info cache for instance {{(pid=69328) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 1056.573721] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ae82e6f-2de2-4edd-b5b6-94dcec7c94df {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.577262] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5874413e-9f43-4795-8196-0ac3a13acee1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.594468] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72dcbac1-03dd-42e0-a476-656d551c6404 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.598935] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1056.602673] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-42d2cf44-71ed-4c93-afdc-34fe9a4b070a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.616706] env[69328]: DEBUG nova.compute.provider_tree [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1056.622988] env[69328]: DEBUG nova.network.neutron [None req-b59ab517-cfd7-4d65-808b-6c75bd5d9991 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Successfully created port: 2febedad-c6fa-48cf-893b-6baa5b6ddcd6 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1056.626936] env[69328]: DEBUG nova.network.neutron [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1056.673062] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1056.673062] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1056.673062] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Deleting the datastore file [datastore1] 65e38a02-880b-46e2-8866-645a9fc17c7a {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1056.673062] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-05dfbfae-5f24-44d3-b6f8-eecf1e2a736c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.684582] env[69328]: DEBUG nova.compute.manager [None req-c83f3c34-5732-4221-98a5-17e4bc0a9442 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1056.685052] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c83f3c34-5732-4221-98a5-17e4bc0a9442 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1056.686282] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b4e332f-a623-45d5-96d3-16ed326830ad {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.691380] env[69328]: DEBUG oslo_vmware.api [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 1056.691380] env[69328]: value = "task-3273917" [ 1056.691380] env[69328]: _type = "Task" [ 1056.691380] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.701077] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c83f3c34-5732-4221-98a5-17e4bc0a9442 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1056.701784] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-697169d9-a978-47ae-9566-c71a146ea1e5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.706923] env[69328]: DEBUG oslo_vmware.api [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273917, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.712867] env[69328]: DEBUG oslo_vmware.api [None req-c83f3c34-5732-4221-98a5-17e4bc0a9442 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 1056.712867] env[69328]: value = "task-3273918" [ 1056.712867] env[69328]: _type = "Task" [ 1056.712867] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.730554] env[69328]: DEBUG oslo_vmware.api [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273912, 'name': PowerOnVM_Task, 'duration_secs': 0.739886} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.735559] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1056.737702] env[69328]: DEBUG oslo_vmware.api [None req-c83f3c34-5732-4221-98a5-17e4bc0a9442 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273918, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.816900] env[69328]: DEBUG nova.network.neutron [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Updating instance_info_cache with network_info: [{"id": "d779425b-180c-47fd-b307-e02e14f18a26", "address": "fa:16:3e:47:6b:17", "network": {"id": "4031def8-0553-461f-9528-aa338b9d7b2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1834835647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f357b5a9494b4849a83aa934c5d4e26b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "357d2811-e990-4985-9f9e-b158d10d3699", "external-id": "nsx-vlan-transportzone-641", "segmentation_id": 641, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd779425b-18", "ovs_interfaceid": "d779425b-180c-47fd-b307-e02e14f18a26", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1056.866031] env[69328]: DEBUG nova.compute.manager [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1056.867178] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b42f32f-3414-47de-aa0c-86bcd98106b5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.881381] env[69328]: DEBUG oslo_vmware.api [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273915, 'name': Rename_Task, 'duration_secs': 0.167627} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.882232] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1056.882232] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2f3c4f7b-2770-4bbf-b7bf-900f1126b430 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.888575] env[69328]: DEBUG oslo_vmware.api [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 1056.888575] env[69328]: value = "task-3273919" [ 1056.888575] env[69328]: _type = "Task" [ 1056.888575] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.897175] env[69328]: DEBUG oslo_vmware.api [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273919, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.124261] env[69328]: DEBUG nova.scheduler.client.report [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1057.204651] env[69328]: DEBUG oslo_vmware.api [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273917, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170779} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.204968] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1057.205184] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1057.205345] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1057.225316] env[69328]: DEBUG oslo_vmware.api [None req-c83f3c34-5732-4221-98a5-17e4bc0a9442 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273918, 'name': PowerOffVM_Task, 'duration_secs': 0.272052} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.225585] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c83f3c34-5732-4221-98a5-17e4bc0a9442 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1057.225758] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c83f3c34-5732-4221-98a5-17e4bc0a9442 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1057.226024] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-67e36652-89db-4760-9f6e-d4bfe0a91993 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.311678] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c83f3c34-5732-4221-98a5-17e4bc0a9442 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1057.311978] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c83f3c34-5732-4221-98a5-17e4bc0a9442 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1057.312152] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-c83f3c34-5732-4221-98a5-17e4bc0a9442 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Deleting the datastore file [datastore2] 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 
1057.312499] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dfc5c0de-3779-4a16-89c8-c2948ca7d342 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.319387] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Releasing lock "refresh_cache-dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1057.319691] env[69328]: DEBUG nova.compute.manager [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Instance network_info: |[{"id": "d779425b-180c-47fd-b307-e02e14f18a26", "address": "fa:16:3e:47:6b:17", "network": {"id": "4031def8-0553-461f-9528-aa338b9d7b2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1834835647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f357b5a9494b4849a83aa934c5d4e26b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "357d2811-e990-4985-9f9e-b158d10d3699", "external-id": "nsx-vlan-transportzone-641", "segmentation_id": 641, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd779425b-18", "ovs_interfaceid": "d779425b-180c-47fd-b307-e02e14f18a26", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1057.320103] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:47:6b:17', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '357d2811-e990-4985-9f9e-b158d10d3699', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd779425b-180c-47fd-b307-e02e14f18a26', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1057.328535] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1057.331825] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1057.332157] env[69328]: DEBUG oslo_vmware.api [None req-c83f3c34-5732-4221-98a5-17e4bc0a9442 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for the task: (returnval){ [ 1057.332157] env[69328]: value = "task-3273921" [ 1057.332157] env[69328]: _type = "Task" [ 1057.332157] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.332347] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e31fb776-83a8-4f5f-8d45-9f184a352e4d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.360250] env[69328]: DEBUG oslo_vmware.api [None req-c83f3c34-5732-4221-98a5-17e4bc0a9442 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273921, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.365716] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1057.365716] env[69328]: value = "task-3273922" [ 1057.365716] env[69328]: _type = "Task" [ 1057.365716] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.378458] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273922, 'name': CreateVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.394302] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d8312b11-704f-44de-8694-e9dcdc949eee tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Lock "a0952fdf-5570-4112-bc4d-e9f9cee1599c" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 41.288s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.403246] env[69328]: DEBUG oslo_vmware.api [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273919, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.432026] env[69328]: DEBUG oslo_vmware.rw_handles [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Completed reading data from the image iterator. {{(pid=69328) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1057.432291] env[69328]: DEBUG oslo_vmware.rw_handles [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52224333-c781-3245-ac63-f9f4f9c617e4/disk-0.vmdk. 
{{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1057.433624] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c77a90e2-1ea4-4a30-a481-cb2a941c9c75 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.439878] env[69328]: DEBUG oslo_vmware.rw_handles [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52224333-c781-3245-ac63-f9f4f9c617e4/disk-0.vmdk is in state: ready. {{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1057.440202] env[69328]: DEBUG oslo_vmware.rw_handles [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52224333-c781-3245-ac63-f9f4f9c617e4/disk-0.vmdk. {{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1057.440317] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-5b26abf8-4d6c-48a9-b31d-0e50548229a8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.629018] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.583s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.629355] env[69328]: INFO nova.compute.manager [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Migrating [ 1057.636826] env[69328]: DEBUG oslo_concurrency.lockutils [None req-427d6a49-3d5a-4df6-8145-722a95c1f0b6 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.792s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1057.636826] env[69328]: DEBUG nova.objects.instance [None req-427d6a49-3d5a-4df6-8145-722a95c1f0b6 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Lazy-loading 'resources' on Instance uuid 19f537b7-90fc-4832-b137-e042e00a508b {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1057.802023] env[69328]: DEBUG oslo_vmware.rw_handles [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52224333-c781-3245-ac63-f9f4f9c617e4/disk-0.vmdk. 
{{(pid=69328) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1057.802023] env[69328]: INFO nova.virt.vmwareapi.images [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Downloaded image file data 83b1e553-81a0-4dcf-a9f7-df6e5e0289ab [ 1057.802649] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96f1df26-d082-4a02-8474-db602581acbd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.822040] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7f1a7225-2eb9-456f-b97e-446dd847e76b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.849136] env[69328]: INFO nova.virt.vmwareapi.images [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] The imported VM was unregistered [ 1057.849990] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Caching image {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1057.850333] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Creating directory with path [datastore2] devstack-image-cache_base/83b1e553-81a0-4dcf-a9f7-df6e5e0289ab {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1057.851540] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8e2f3180-bb3e-4bcd-87c2-62c4fd9ac4ae {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.862630] env[69328]: DEBUG oslo_vmware.api [None req-c83f3c34-5732-4221-98a5-17e4bc0a9442 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Task: {'id': task-3273921, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.24056} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.863023] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-c83f3c34-5732-4221-98a5-17e4bc0a9442 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1057.863332] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c83f3c34-5732-4221-98a5-17e4bc0a9442 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1057.863626] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c83f3c34-5732-4221-98a5-17e4bc0a9442 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1057.863924] env[69328]: INFO nova.compute.manager [None req-c83f3c34-5732-4221-98a5-17e4bc0a9442 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1057.864283] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c83f3c34-5732-4221-98a5-17e4bc0a9442 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1057.864806] env[69328]: DEBUG nova.compute.manager [-] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1057.865029] env[69328]: DEBUG nova.network.neutron [-] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1057.871457] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Created directory with path [datastore2] devstack-image-cache_base/83b1e553-81a0-4dcf-a9f7-df6e5e0289ab {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1057.871768] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_abfabe8c-b9ab-46bf-924a-f5186665a333/OSTACK_IMG_abfabe8c-b9ab-46bf-924a-f5186665a333.vmdk to [datastore2] devstack-image-cache_base/83b1e553-81a0-4dcf-a9f7-df6e5e0289ab/83b1e553-81a0-4dcf-a9f7-df6e5e0289ab.vmdk. 
{{(pid=69328) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1057.872446] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-d93b34d7-14c6-419a-98c5-0803d7ba17f4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.877712] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273922, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.881959] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 1057.881959] env[69328]: value = "task-3273924" [ 1057.881959] env[69328]: _type = "Task" [ 1057.881959] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.890144] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273924, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.898295] env[69328]: DEBUG oslo_vmware.api [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3273919, 'name': PowerOnVM_Task, 'duration_secs': 0.526603} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.898660] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1057.898989] env[69328]: INFO nova.compute.manager [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Took 10.69 seconds to spawn the instance on the hypervisor. 
[ 1057.899299] env[69328]: DEBUG nova.compute.manager [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1057.900156] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b172427-2353-4002-8aef-87d8bf61ba37 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.148022] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "refresh_cache-204286d7-c806-48cb-85e9-b2a78571777c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1058.148022] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquired lock "refresh_cache-204286d7-c806-48cb-85e9-b2a78571777c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1058.148022] env[69328]: DEBUG nova.network.neutron [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1058.252118] env[69328]: DEBUG nova.virt.hardware [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1058.252118] env[69328]: DEBUG nova.virt.hardware [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1058.252118] env[69328]: DEBUG nova.virt.hardware [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1058.252118] env[69328]: DEBUG nova.virt.hardware [None req-459a0d67-5751-4980-9f29-740914a11ed2 
tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1058.252118] env[69328]: DEBUG nova.virt.hardware [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1058.252674] env[69328]: DEBUG nova.virt.hardware [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1058.253086] env[69328]: DEBUG nova.virt.hardware [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1058.253410] env[69328]: DEBUG nova.virt.hardware [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1058.253718] env[69328]: DEBUG nova.virt.hardware [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1058.254291] env[69328]: DEBUG nova.virt.hardware [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1058.254586] env[69328]: DEBUG nova.virt.hardware [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1058.255603] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9fe7d23-291a-4181-a7f9-fbfddd9181a6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.268704] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2f43693-c475-4630-a016-188885cdcbcb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.287211] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:db:a0:8f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'62237242-7ce2-4664-a1c5-6783b516b507', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dce2dda5-86c0-4f69-a3c1-c0f7a5c3b56e', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1058.295425] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1058.298571] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1058.302016] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e1008b7e-7b80-4ca6-89d6-2fa1e3a698b8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.323814] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1058.323814] env[69328]: value = "task-3273925" [ 1058.323814] env[69328]: _type = "Task" [ 1058.323814] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.332815] env[69328]: DEBUG oslo_concurrency.lockutils [None req-81971890-40ec-481a-8320-72567755728d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "14521ee3-d749-48b4-aeec-23c94ca2cf9f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1058.333653] env[69328]: DEBUG oslo_concurrency.lockutils [None req-81971890-40ec-481a-8320-72567755728d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "14521ee3-d749-48b4-aeec-23c94ca2cf9f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1058.333653] env[69328]: DEBUG oslo_concurrency.lockutils [None req-81971890-40ec-481a-8320-72567755728d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "14521ee3-d749-48b4-aeec-23c94ca2cf9f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1058.333653] env[69328]: DEBUG oslo_concurrency.lockutils [None req-81971890-40ec-481a-8320-72567755728d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "14521ee3-d749-48b4-aeec-23c94ca2cf9f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1058.333653] env[69328]: DEBUG oslo_concurrency.lockutils [None req-81971890-40ec-481a-8320-72567755728d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock 
"14521ee3-d749-48b4-aeec-23c94ca2cf9f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1058.335115] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273925, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.340065] env[69328]: INFO nova.compute.manager [None req-81971890-40ec-481a-8320-72567755728d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Terminating instance [ 1058.372075] env[69328]: DEBUG nova.network.neutron [None req-b59ab517-cfd7-4d65-808b-6c75bd5d9991 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Successfully updated port: 2febedad-c6fa-48cf-893b-6baa5b6ddcd6 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1058.379770] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273922, 'name': CreateVM_Task, 'duration_secs': 0.904846} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.383020] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1058.384487] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1058.384631] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1058.384980] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1058.389634] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d058a516-d603-49b7-8ac3-f0b86a4ef6e6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.397781] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273924, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.402540] env[69328]: DEBUG oslo_vmware.api [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1058.402540] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52060e39-e245-e5c3-fda6-9793f332f6d5" [ 1058.402540] env[69328]: _type = "Task" [ 1058.402540] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.416369] env[69328]: DEBUG oslo_vmware.api [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52060e39-e245-e5c3-fda6-9793f332f6d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.418502] env[69328]: INFO nova.compute.manager [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Took 31.78 seconds to build instance. [ 1058.610875] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b0667f0-829d-4c58-9c65-d2e925f963be {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.618365] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24939e43-2b1c-4c37-94aa-98ef627312fd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.656537] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe540072-39cb-43a7-aefc-e8d2a4d2e35e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.666958] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e32383f4-0a1a-40d9-bba1-eeb81d9b4458 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.684887] env[69328]: DEBUG nova.compute.provider_tree [None req-427d6a49-3d5a-4df6-8145-722a95c1f0b6 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1058.831968] env[69328]: DEBUG nova.compute.manager [req-9a83c68c-8f25-42d8-92e5-0a7687a25ae6 req-d186c53a-6bcd-43e6-9305-0da7918e7c6e service nova] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Received event network-changed-d779425b-180c-47fd-b307-e02e14f18a26 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1058.832287] env[69328]: DEBUG nova.compute.manager [req-9a83c68c-8f25-42d8-92e5-0a7687a25ae6 req-d186c53a-6bcd-43e6-9305-0da7918e7c6e service nova] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Refreshing instance network info cache due to event network-changed-d779425b-180c-47fd-b307-e02e14f18a26. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1058.832521] env[69328]: DEBUG oslo_concurrency.lockutils [req-9a83c68c-8f25-42d8-92e5-0a7687a25ae6 req-d186c53a-6bcd-43e6-9305-0da7918e7c6e service nova] Acquiring lock "refresh_cache-dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1058.832705] env[69328]: DEBUG oslo_concurrency.lockutils [req-9a83c68c-8f25-42d8-92e5-0a7687a25ae6 req-d186c53a-6bcd-43e6-9305-0da7918e7c6e service nova] Acquired lock "refresh_cache-dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1058.832953] env[69328]: DEBUG nova.network.neutron [req-9a83c68c-8f25-42d8-92e5-0a7687a25ae6 req-d186c53a-6bcd-43e6-9305-0da7918e7c6e service nova] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Refreshing network info cache for port d779425b-180c-47fd-b307-e02e14f18a26 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1058.838184] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273925, 'name': CreateVM_Task} progress is 25%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.844285] env[69328]: DEBUG nova.compute.manager [None req-81971890-40ec-481a-8320-72567755728d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1058.844513] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-81971890-40ec-481a-8320-72567755728d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1058.845665] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a51763b-40c0-4365-b909-e33663c190c2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.855679] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-81971890-40ec-481a-8320-72567755728d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1058.856156] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b6c128f0-4880-478e-b9a5-0ab5fc27bca6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.865103] env[69328]: DEBUG oslo_vmware.api [None req-81971890-40ec-481a-8320-72567755728d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 1058.865103] env[69328]: value = "task-3273926" [ 1058.865103] env[69328]: _type = "Task" [ 1058.865103] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.876539] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b59ab517-cfd7-4d65-808b-6c75bd5d9991 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "refresh_cache-dc050589-e37a-4798-9532-df4ecfab7eb1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1058.876539] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b59ab517-cfd7-4d65-808b-6c75bd5d9991 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquired lock "refresh_cache-dc050589-e37a-4798-9532-df4ecfab7eb1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1058.876539] env[69328]: DEBUG nova.network.neutron [None req-b59ab517-cfd7-4d65-808b-6c75bd5d9991 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1058.878083] env[69328]: DEBUG oslo_vmware.api [None req-81971890-40ec-481a-8320-72567755728d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273926, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.895831] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273924, 'name': MoveVirtualDisk_Task} progress is 18%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.918029] env[69328]: DEBUG oslo_vmware.api [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52060e39-e245-e5c3-fda6-9793f332f6d5, 'name': SearchDatastore_Task, 'duration_secs': 0.016859} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.918029] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1058.918029] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1058.918029] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1058.918029] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1058.918029] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1058.918029] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-96a85717-cd64-4309-804a-30d03113b8d8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.919953] env[69328]: DEBUG oslo_concurrency.lockutils [None req-336e6ebc-e56d-4cb0-a48d-f637c1b7eab1 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "071c1837-9d0b-4b69-b16e-991b300385fb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.288s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1058.939464] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1058.939682] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1058.940679] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12168dbd-4aa1-42b3-be2d-9eaba85c000a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.950241] env[69328]: DEBUG oslo_vmware.api [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1058.950241] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5270b1f7-530c-e7b5-02cb-f35257a764d7" [ 1058.950241] env[69328]: _type = "Task" [ 1058.950241] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.962265] env[69328]: DEBUG oslo_vmware.api [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5270b1f7-530c-e7b5-02cb-f35257a764d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.997811] env[69328]: DEBUG nova.network.neutron [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Updating instance_info_cache with network_info: [{"id": "e957681a-e4bc-4b9a-b2b7-a4783ae059b8", "address": "fa:16:3e:be:0a:24", "network": {"id": "620f277d-ca49-41da-83a0-afb8c393e26e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1223326239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdc479a290524130b9d17e627a64b65a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape957681a-e4", "ovs_interfaceid": "e957681a-e4bc-4b9a-b2b7-a4783ae059b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1059.012301] env[69328]: DEBUG nova.network.neutron [-] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1059.177664] env[69328]: DEBUG nova.compute.manager [req-2461f6fa-8494-43c2-b39e-e9f9084a0866 req-38380ae8-d19e-47cf-b7e0-5d2e9c177afa service nova] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Received event network-vif-deleted-92b25b0d-9caa-4faa-ae5a-417a735a03f7 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1059.191081] env[69328]: DEBUG 
nova.scheduler.client.report [None req-427d6a49-3d5a-4df6-8145-722a95c1f0b6 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1059.342757] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273925, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.376949] env[69328]: DEBUG oslo_vmware.api [None req-81971890-40ec-481a-8320-72567755728d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273926, 'name': PowerOffVM_Task, 'duration_secs': 0.255717} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.379564] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-81971890-40ec-481a-8320-72567755728d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1059.379754] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-81971890-40ec-481a-8320-72567755728d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1059.381692] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-141ebbce-f174-4225-85e7-15f6c00bc26c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.393350] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273924, 'name': MoveVirtualDisk_Task} progress is 35%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.437834] env[69328]: WARNING nova.network.neutron [None req-b59ab517-cfd7-4d65-808b-6c75bd5d9991 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] aed15283-4a79-4e99-8b6c-49cf754138de already exists in list: networks containing: ['aed15283-4a79-4e99-8b6c-49cf754138de']. 
ignoring it [ 1059.456438] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-81971890-40ec-481a-8320-72567755728d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1059.456626] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-81971890-40ec-481a-8320-72567755728d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1059.456840] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-81971890-40ec-481a-8320-72567755728d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Deleting the datastore file [datastore1] 14521ee3-d749-48b4-aeec-23c94ca2cf9f {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1059.465036] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f020c941-6dd8-406a-80b8-d440c60707eb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.466387] env[69328]: DEBUG oslo_vmware.api [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5270b1f7-530c-e7b5-02cb-f35257a764d7, 'name': SearchDatastore_Task, 'duration_secs': 0.084573} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.467824] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62031f8f-319f-4fd0-b762-e4a3ceaa4e4e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.472534] env[69328]: DEBUG oslo_vmware.api [None req-81971890-40ec-481a-8320-72567755728d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for the task: (returnval){ [ 1059.472534] env[69328]: value = "task-3273928" [ 1059.472534] env[69328]: _type = "Task" [ 1059.472534] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.477408] env[69328]: DEBUG oslo_vmware.api [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1059.477408] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52774cc2-b69b-09fe-807c-53f71f9f84f7" [ 1059.477408] env[69328]: _type = "Task" [ 1059.477408] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.485512] env[69328]: DEBUG oslo_vmware.api [None req-81971890-40ec-481a-8320-72567755728d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273928, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.492133] env[69328]: DEBUG oslo_vmware.api [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52774cc2-b69b-09fe-807c-53f71f9f84f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.500900] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Releasing lock "refresh_cache-204286d7-c806-48cb-85e9-b2a78571777c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1059.515241] env[69328]: INFO nova.compute.manager [-] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Took 1.65 seconds to deallocate network for instance. [ 1059.669319] env[69328]: DEBUG nova.network.neutron [req-9a83c68c-8f25-42d8-92e5-0a7687a25ae6 req-d186c53a-6bcd-43e6-9305-0da7918e7c6e service nova] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Updated VIF entry in instance network info cache for port d779425b-180c-47fd-b307-e02e14f18a26. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1059.670529] env[69328]: DEBUG nova.network.neutron [req-9a83c68c-8f25-42d8-92e5-0a7687a25ae6 req-d186c53a-6bcd-43e6-9305-0da7918e7c6e service nova] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Updating instance_info_cache with network_info: [{"id": "d779425b-180c-47fd-b307-e02e14f18a26", "address": "fa:16:3e:47:6b:17", "network": {"id": "4031def8-0553-461f-9528-aa338b9d7b2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1834835647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f357b5a9494b4849a83aa934c5d4e26b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "357d2811-e990-4985-9f9e-b158d10d3699", "external-id": "nsx-vlan-transportzone-641", "segmentation_id": 641, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd779425b-18", "ovs_interfaceid": "d779425b-180c-47fd-b307-e02e14f18a26", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1059.704701] env[69328]: DEBUG oslo_concurrency.lockutils [None req-427d6a49-3d5a-4df6-8145-722a95c1f0b6 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.068s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1059.709988] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9163baf9-03d1-4a5a-9933-2d2e53d64012 
tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.443s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1059.710337] env[69328]: DEBUG nova.objects.instance [None req-9163baf9-03d1-4a5a-9933-2d2e53d64012 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Lazy-loading 'resources' on Instance uuid 65fccb3f-5e0e-4140-be0a-5ba20f494d50 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1059.741161] env[69328]: INFO nova.scheduler.client.report [None req-427d6a49-3d5a-4df6-8145-722a95c1f0b6 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Deleted allocations for instance 19f537b7-90fc-4832-b137-e042e00a508b [ 1059.839725] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273925, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.896466] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273924, 'name': MoveVirtualDisk_Task} progress is 57%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.944402] env[69328]: DEBUG nova.network.neutron [None req-b59ab517-cfd7-4d65-808b-6c75bd5d9991 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Updating instance_info_cache with network_info: [{"id": "95776220-5fd9-42a1-8bf9-cfb9fe49d62d", "address": "fa:16:3e:0c:f4:26", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.130", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95776220-5f", "ovs_interfaceid": "95776220-5fd9-42a1-8bf9-cfb9fe49d62d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "2febedad-c6fa-48cf-893b-6baa5b6ddcd6", "address": "fa:16:3e:d1:26:ae", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], 
"routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2febedad-c6", "ovs_interfaceid": "2febedad-c6fa-48cf-893b-6baa5b6ddcd6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1059.985183] env[69328]: DEBUG oslo_vmware.api [None req-81971890-40ec-481a-8320-72567755728d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Task: {'id': task-3273928, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.325904} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.985866] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-81971890-40ec-481a-8320-72567755728d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1059.986066] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-81971890-40ec-481a-8320-72567755728d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1059.986258] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-81971890-40ec-481a-8320-72567755728d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1059.986487] env[69328]: INFO nova.compute.manager [None req-81971890-40ec-481a-8320-72567755728d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1059.986714] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-81971890-40ec-481a-8320-72567755728d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1059.986932] env[69328]: DEBUG nova.compute.manager [-] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1059.987040] env[69328]: DEBUG nova.network.neutron [-] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1059.993017] env[69328]: DEBUG oslo_vmware.api [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52774cc2-b69b-09fe-807c-53f71f9f84f7, 'name': SearchDatastore_Task, 'duration_secs': 0.102468} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.994355] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1059.994618] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34/dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1059.994703] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-043c2869-8136-45b4-80af-85b3850d6a19 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.008049] env[69328]: DEBUG oslo_vmware.api [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1060.008049] env[69328]: value = "task-3273929" [ 1060.008049] env[69328]: _type = "Task" [ 1060.008049] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.018865] env[69328]: DEBUG oslo_vmware.api [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3273929, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.029758] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c83f3c34-5732-4221-98a5-17e4bc0a9442 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1060.174521] env[69328]: DEBUG oslo_concurrency.lockutils [req-9a83c68c-8f25-42d8-92e5-0a7687a25ae6 req-d186c53a-6bcd-43e6-9305-0da7918e7c6e service nova] Releasing lock "refresh_cache-dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1060.175574] env[69328]: DEBUG nova.compute.manager [req-9a83c68c-8f25-42d8-92e5-0a7687a25ae6 req-d186c53a-6bcd-43e6-9305-0da7918e7c6e service nova] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Received event network-vif-plugged-2febedad-c6fa-48cf-893b-6baa5b6ddcd6 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1060.175972] env[69328]: DEBUG oslo_concurrency.lockutils [req-9a83c68c-8f25-42d8-92e5-0a7687a25ae6 req-d186c53a-6bcd-43e6-9305-0da7918e7c6e service nova] Acquiring lock "dc050589-e37a-4798-9532-df4ecfab7eb1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1060.176367] env[69328]: DEBUG oslo_concurrency.lockutils [req-9a83c68c-8f25-42d8-92e5-0a7687a25ae6 req-d186c53a-6bcd-43e6-9305-0da7918e7c6e service nova] Lock "dc050589-e37a-4798-9532-df4ecfab7eb1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1060.176713] env[69328]: DEBUG oslo_concurrency.lockutils [req-9a83c68c-8f25-42d8-92e5-0a7687a25ae6 req-d186c53a-6bcd-43e6-9305-0da7918e7c6e service nova] Lock "dc050589-e37a-4798-9532-df4ecfab7eb1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1060.177032] env[69328]: DEBUG nova.compute.manager [req-9a83c68c-8f25-42d8-92e5-0a7687a25ae6 req-d186c53a-6bcd-43e6-9305-0da7918e7c6e service nova] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] No waiting events found dispatching network-vif-plugged-2febedad-c6fa-48cf-893b-6baa5b6ddcd6 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1060.177354] env[69328]: WARNING nova.compute.manager [req-9a83c68c-8f25-42d8-92e5-0a7687a25ae6 req-d186c53a-6bcd-43e6-9305-0da7918e7c6e service nova] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Received unexpected event network-vif-plugged-2febedad-c6fa-48cf-893b-6baa5b6ddcd6 for instance with vm_state active and task_state None. 
[ 1060.254149] env[69328]: DEBUG oslo_concurrency.lockutils [None req-427d6a49-3d5a-4df6-8145-722a95c1f0b6 tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Lock "19f537b7-90fc-4832-b137-e042e00a508b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.410s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1060.340942] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273925, 'name': CreateVM_Task, 'duration_secs': 1.806996} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.341807] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1060.344034] env[69328]: DEBUG oslo_concurrency.lockutils [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1060.344034] env[69328]: DEBUG oslo_concurrency.lockutils [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1060.344034] env[69328]: DEBUG oslo_concurrency.lockutils [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1060.344034] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8317165-dce0-4f76-8273-816a91305895 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.349409] env[69328]: DEBUG oslo_vmware.api [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 1060.349409] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52325fc4-31a8-d4b1-9f2e-0f5eeede2221" [ 1060.349409] env[69328]: _type = "Task" [ 1060.349409] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.363150] env[69328]: DEBUG oslo_vmware.api [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52325fc4-31a8-d4b1-9f2e-0f5eeede2221, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.396418] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273924, 'name': MoveVirtualDisk_Task} progress is 80%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.447172] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b59ab517-cfd7-4d65-808b-6c75bd5d9991 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Releasing lock "refresh_cache-dc050589-e37a-4798-9532-df4ecfab7eb1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1060.448041] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b59ab517-cfd7-4d65-808b-6c75bd5d9991 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "dc050589-e37a-4798-9532-df4ecfab7eb1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1060.448041] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b59ab517-cfd7-4d65-808b-6c75bd5d9991 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquired lock "dc050589-e37a-4798-9532-df4ecfab7eb1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1060.449275] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a1a773f-07bb-4d26-8f80-08c87a6be395 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.471165] env[69328]: DEBUG nova.virt.hardware [None req-b59ab517-cfd7-4d65-808b-6c75bd5d9991 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1060.471165] env[69328]: DEBUG nova.virt.hardware [None req-b59ab517-cfd7-4d65-808b-6c75bd5d9991 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1060.471165] env[69328]: DEBUG nova.virt.hardware [None req-b59ab517-cfd7-4d65-808b-6c75bd5d9991 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1060.471165] env[69328]: DEBUG nova.virt.hardware [None req-b59ab517-cfd7-4d65-808b-6c75bd5d9991 tempest-AttachInterfacesTestJSON-1279775803 
tempest-AttachInterfacesTestJSON-1279775803-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1060.471165] env[69328]: DEBUG nova.virt.hardware [None req-b59ab517-cfd7-4d65-808b-6c75bd5d9991 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1060.471165] env[69328]: DEBUG nova.virt.hardware [None req-b59ab517-cfd7-4d65-808b-6c75bd5d9991 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1060.471165] env[69328]: DEBUG nova.virt.hardware [None req-b59ab517-cfd7-4d65-808b-6c75bd5d9991 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1060.471165] env[69328]: DEBUG nova.virt.hardware [None req-b59ab517-cfd7-4d65-808b-6c75bd5d9991 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1060.471165] env[69328]: DEBUG nova.virt.hardware [None req-b59ab517-cfd7-4d65-808b-6c75bd5d9991 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1060.471165] env[69328]: DEBUG nova.virt.hardware [None req-b59ab517-cfd7-4d65-808b-6c75bd5d9991 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1060.473224] env[69328]: DEBUG nova.virt.hardware [None req-b59ab517-cfd7-4d65-808b-6c75bd5d9991 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1060.480794] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b59ab517-cfd7-4d65-808b-6c75bd5d9991 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Reconfiguring VM to attach interface {{(pid=69328) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1060.484255] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a09d1d30-ef56-433a-8eab-e7b61cf45174 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.505298] env[69328]: DEBUG oslo_vmware.api [None req-b59ab517-cfd7-4d65-808b-6c75bd5d9991 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 1060.505298] env[69328]: value = 
"task-3273930" [ 1060.505298] env[69328]: _type = "Task" [ 1060.505298] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.523421] env[69328]: DEBUG oslo_vmware.api [None req-b59ab517-cfd7-4d65-808b-6c75bd5d9991 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273930, 'name': ReconfigVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.528152] env[69328]: DEBUG oslo_vmware.api [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3273929, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.675120] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7786fce8-5cc6-4e08-83f8-7beee87495e2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.685956] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f30a33f4-f69f-4f8b-8f0a-ac4935fd23e3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.729692] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17735863-3936-4f77-824d-267cf56a959d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.743933] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bd332f9-867c-4cef-992c-7ba206b88551 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.762612] env[69328]: DEBUG nova.compute.provider_tree [None req-9163baf9-03d1-4a5a-9933-2d2e53d64012 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1060.843308] env[69328]: DEBUG nova.network.neutron [-] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.862176] env[69328]: DEBUG oslo_vmware.api [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52325fc4-31a8-d4b1-9f2e-0f5eeede2221, 'name': SearchDatastore_Task, 'duration_secs': 0.086737} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.862513] env[69328]: DEBUG oslo_concurrency.lockutils [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1060.862777] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1060.863050] env[69328]: DEBUG oslo_concurrency.lockutils [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1060.863203] env[69328]: DEBUG oslo_concurrency.lockutils [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1060.863384] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1060.863654] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0fa0d73f-2151-4966-a9cb-98b7198d1b5a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.876837] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1060.876837] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1060.878101] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ee3149d-219c-433f-aecc-c64c656b92e1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.883630] env[69328]: DEBUG oslo_vmware.api [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 1060.883630] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]526a00a8-7774-a992-1fe9-b771ab7ecfc6" [ 1060.883630] env[69328]: _type = "Task" [ 1060.883630] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.894131] env[69328]: DEBUG oslo_vmware.api [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]526a00a8-7774-a992-1fe9-b771ab7ecfc6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.897096] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273924, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.015782] env[69328]: DEBUG oslo_vmware.api [None req-b59ab517-cfd7-4d65-808b-6c75bd5d9991 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273930, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.023460] env[69328]: DEBUG oslo_vmware.api [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3273929, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.024577] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81f2ecfb-67be-49eb-8a53-d4c0cb7a50d5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.041140] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Updating instance '204286d7-c806-48cb-85e9-b2a78571777c' progress to 0 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1061.201545] env[69328]: DEBUG nova.compute.manager [req-f71c1696-10a6-47db-ab74-27c55f69f2fc req-4e54af6c-3506-43f0-ad2c-121818cca201 service nova] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Received event network-changed-2febedad-c6fa-48cf-893b-6baa5b6ddcd6 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1061.201873] env[69328]: DEBUG nova.compute.manager [req-f71c1696-10a6-47db-ab74-27c55f69f2fc req-4e54af6c-3506-43f0-ad2c-121818cca201 service nova] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Refreshing instance network info cache due to event network-changed-2febedad-c6fa-48cf-893b-6baa5b6ddcd6. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1061.202283] env[69328]: DEBUG oslo_concurrency.lockutils [req-f71c1696-10a6-47db-ab74-27c55f69f2fc req-4e54af6c-3506-43f0-ad2c-121818cca201 service nova] Acquiring lock "refresh_cache-dc050589-e37a-4798-9532-df4ecfab7eb1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.202535] env[69328]: DEBUG oslo_concurrency.lockutils [req-f71c1696-10a6-47db-ab74-27c55f69f2fc req-4e54af6c-3506-43f0-ad2c-121818cca201 service nova] Acquired lock "refresh_cache-dc050589-e37a-4798-9532-df4ecfab7eb1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1061.202786] env[69328]: DEBUG nova.network.neutron [req-f71c1696-10a6-47db-ab74-27c55f69f2fc req-4e54af6c-3506-43f0-ad2c-121818cca201 service nova] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Refreshing network info cache for port 2febedad-c6fa-48cf-893b-6baa5b6ddcd6 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1061.236185] env[69328]: DEBUG oslo_concurrency.lockutils [None req-33ab74df-e53e-4903-b7a4-7d99e7d8c17d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Acquiring lock "6b9757de-a274-4f4d-9b73-cc2ca92b4732" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1061.236459] env[69328]: DEBUG oslo_concurrency.lockutils [None req-33ab74df-e53e-4903-b7a4-7d99e7d8c17d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Lock "6b9757de-a274-4f4d-9b73-cc2ca92b4732" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1061.236734] env[69328]: DEBUG oslo_concurrency.lockutils [None 
req-33ab74df-e53e-4903-b7a4-7d99e7d8c17d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Acquiring lock "6b9757de-a274-4f4d-9b73-cc2ca92b4732-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1061.236991] env[69328]: DEBUG oslo_concurrency.lockutils [None req-33ab74df-e53e-4903-b7a4-7d99e7d8c17d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Lock "6b9757de-a274-4f4d-9b73-cc2ca92b4732-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1061.237193] env[69328]: DEBUG oslo_concurrency.lockutils [None req-33ab74df-e53e-4903-b7a4-7d99e7d8c17d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Lock "6b9757de-a274-4f4d-9b73-cc2ca92b4732-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1061.239113] env[69328]: INFO nova.compute.manager [None req-33ab74df-e53e-4903-b7a4-7d99e7d8c17d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Terminating instance [ 1061.265630] env[69328]: DEBUG nova.scheduler.client.report [None req-9163baf9-03d1-4a5a-9933-2d2e53d64012 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1061.346988] env[69328]: INFO nova.compute.manager [-] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Took 1.36 seconds to deallocate network for instance. [ 1061.396909] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273924, 'name': MoveVirtualDisk_Task, 'duration_secs': 3.401675} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.400249] env[69328]: INFO nova.virt.vmwareapi.ds_util [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_abfabe8c-b9ab-46bf-924a-f5186665a333/OSTACK_IMG_abfabe8c-b9ab-46bf-924a-f5186665a333.vmdk to [datastore2] devstack-image-cache_base/83b1e553-81a0-4dcf-a9f7-df6e5e0289ab/83b1e553-81a0-4dcf-a9f7-df6e5e0289ab.vmdk. 
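The entries above show the driver's task handling end to end: a vCenter operation such as SearchDatastore_Task or MoveVirtualDisk_Task returns a task reference, and the service then polls it (the repeated "_poll_task ... progress is N%" lines) until it reports "completed successfully", at which point the held datastore lock is released. The sketch below is only an illustration of that poll-until-done shape, not the oslo.vmware implementation; the wait_for_task helper, the TaskInfo tuple, the poll interval, and the fake task at the bottom are assumptions introduced for the example.

import time
from collections import namedtuple

# Stand-in for the subset of vCenter task state visible in the log lines:
# a state ("running", "success", "error") plus a progress percentage.
# This type is an assumption for the sketch, not an oslo.vmware class.
TaskInfo = namedtuple("TaskInfo", ["state", "progress", "error"])

def wait_for_task(fetch_info, poll_interval=0.5, log=print):
    """Poll a task until it finishes, mirroring the 'progress is N%' /
    'completed successfully' pattern in the entries above.

    fetch_info: callable returning the current TaskInfo for the task
                (a stand-in for querying vCenter for task status).
    """
    while True:
        info = fetch_info()
        if info.state == "running":
            log("progress is %s%%" % info.progress)
            time.sleep(poll_interval)
            continue
        if info.state == "success":
            log("completed successfully")
            return info
        raise RuntimeError("task failed: %s" % info.error)

# Example: a fake task that reports progress twice and then succeeds.
if __name__ == "__main__":
    states = iter([TaskInfo("running", 0, None),
                   TaskInfo("running", 51, None),
                   TaskInfo("success", 100, None)])
    wait_for_task(lambda: next(states), poll_interval=0.0)

In the real driver this loop lives inside oslo.vmware's session wait_for_task / _poll_task code referenced in the source tags above; the sketch only shows the shape of the polling the DEBUG lines reflect.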
[ 1061.400499] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Cleaning up location [datastore2] OSTACK_IMG_abfabe8c-b9ab-46bf-924a-f5186665a333 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1061.400697] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_abfabe8c-b9ab-46bf-924a-f5186665a333 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1061.401016] env[69328]: DEBUG oslo_vmware.api [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]526a00a8-7774-a992-1fe9-b771ab7ecfc6, 'name': SearchDatastore_Task, 'duration_secs': 0.047934} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.401222] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-66dec2ad-e020-4640-959a-5d8c7ad3e416 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.403377] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8f55013-e5c6-4049-ad78-2427b994fbd6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.409436] env[69328]: DEBUG oslo_vmware.api [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 1061.409436] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e5993e-af57-10cc-5092-6cba7e6339f8" [ 1061.409436] env[69328]: _type = "Task" [ 1061.409436] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.410740] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 1061.410740] env[69328]: value = "task-3273931" [ 1061.410740] env[69328]: _type = "Task" [ 1061.410740] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.421314] env[69328]: DEBUG oslo_vmware.api [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e5993e-af57-10cc-5092-6cba7e6339f8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.424334] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273931, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.474171] env[69328]: DEBUG nova.compute.manager [req-ed3c1366-66c0-48f5-afbf-e09b5500b519 req-a22062e4-6ed9-447e-adfa-fe21a9202f4c service nova] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Received event network-changed-61188e10-aa7e-4ec8-99f4-bc6a8380b3be {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1061.474382] env[69328]: DEBUG nova.compute.manager [req-ed3c1366-66c0-48f5-afbf-e09b5500b519 req-a22062e4-6ed9-447e-adfa-fe21a9202f4c service nova] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Refreshing instance network info cache due to event network-changed-61188e10-aa7e-4ec8-99f4-bc6a8380b3be. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1061.474556] env[69328]: DEBUG oslo_concurrency.lockutils [req-ed3c1366-66c0-48f5-afbf-e09b5500b519 req-a22062e4-6ed9-447e-adfa-fe21a9202f4c service nova] Acquiring lock "refresh_cache-071c1837-9d0b-4b69-b16e-991b300385fb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.474701] env[69328]: DEBUG oslo_concurrency.lockutils [req-ed3c1366-66c0-48f5-afbf-e09b5500b519 req-a22062e4-6ed9-447e-adfa-fe21a9202f4c service nova] Acquired lock "refresh_cache-071c1837-9d0b-4b69-b16e-991b300385fb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1061.474860] env[69328]: DEBUG nova.network.neutron [req-ed3c1366-66c0-48f5-afbf-e09b5500b519 req-a22062e4-6ed9-447e-adfa-fe21a9202f4c service nova] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Refreshing network info cache for port 61188e10-aa7e-4ec8-99f4-bc6a8380b3be {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1061.516151] env[69328]: DEBUG oslo_vmware.api [None req-b59ab517-cfd7-4d65-808b-6c75bd5d9991 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273930, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.528467] env[69328]: DEBUG oslo_vmware.api [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3273929, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.547739] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1061.548121] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-519b37ef-3655-4597-8381-cfe93b874af8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.557614] env[69328]: DEBUG oslo_vmware.api [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 1061.557614] env[69328]: value = "task-3273932" [ 1061.557614] env[69328]: _type = "Task" [ 1061.557614] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.569022] env[69328]: DEBUG oslo_vmware.api [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273932, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.743636] env[69328]: DEBUG nova.compute.manager [None req-33ab74df-e53e-4903-b7a4-7d99e7d8c17d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1061.743636] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-33ab74df-e53e-4903-b7a4-7d99e7d8c17d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1061.744120] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7bf9b7f-cb68-439c-83e0-58e2e307cbaa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.755570] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-33ab74df-e53e-4903-b7a4-7d99e7d8c17d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1061.755787] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4bd6f80b-df2b-4f39-b80d-adcd3617d4a5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.762741] env[69328]: DEBUG oslo_vmware.api [None req-33ab74df-e53e-4903-b7a4-7d99e7d8c17d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Waiting for the task: (returnval){ [ 1061.762741] env[69328]: value = "task-3273933" [ 1061.762741] env[69328]: _type = "Task" [ 1061.762741] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.772719] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9163baf9-03d1-4a5a-9933-2d2e53d64012 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.063s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1061.779015] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.188s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1061.780563] env[69328]: INFO nova.compute.claims [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1061.783808] env[69328]: DEBUG oslo_vmware.api [None req-33ab74df-e53e-4903-b7a4-7d99e7d8c17d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273933, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.808674] env[69328]: INFO nova.scheduler.client.report [None req-9163baf9-03d1-4a5a-9933-2d2e53d64012 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Deleted allocations for instance 65fccb3f-5e0e-4140-be0a-5ba20f494d50 [ 1061.853579] env[69328]: DEBUG oslo_concurrency.lockutils [None req-81971890-40ec-481a-8320-72567755728d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1061.928144] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273931, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.208848} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.928415] env[69328]: DEBUG oslo_vmware.api [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e5993e-af57-10cc-5092-6cba7e6339f8, 'name': SearchDatastore_Task, 'duration_secs': 0.05372} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.928630] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1061.928794] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Releasing lock "[datastore2] devstack-image-cache_base/83b1e553-81a0-4dcf-a9f7-df6e5e0289ab/83b1e553-81a0-4dcf-a9f7-df6e5e0289ab.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1061.929055] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/83b1e553-81a0-4dcf-a9f7-df6e5e0289ab/83b1e553-81a0-4dcf-a9f7-df6e5e0289ab.vmdk to [datastore2] 76210566-12d7-4f6a-afa1-6329e87e0f85/76210566-12d7-4f6a-afa1-6329e87e0f85.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1061.929317] env[69328]: DEBUG oslo_concurrency.lockutils [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1061.929529] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-459a0d67-5751-4980-9f29-740914a11ed2 
tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 65e38a02-880b-46e2-8866-645a9fc17c7a/65e38a02-880b-46e2-8866-645a9fc17c7a.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1061.930171] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3ffb5f51-936e-48a7-a115-79f9f4297591 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.931885] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-69990f89-7b53-43bd-95e8-e2f9740604ac {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.939134] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 1061.939134] env[69328]: value = "task-3273935" [ 1061.939134] env[69328]: _type = "Task" [ 1061.939134] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.940394] env[69328]: DEBUG oslo_vmware.api [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 1061.940394] env[69328]: value = "task-3273934" [ 1061.940394] env[69328]: _type = "Task" [ 1061.940394] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.957052] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273935, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.957292] env[69328]: DEBUG oslo_vmware.api [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273934, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.019092] env[69328]: DEBUG oslo_vmware.api [None req-b59ab517-cfd7-4d65-808b-6c75bd5d9991 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273930, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.028083] env[69328]: DEBUG oslo_vmware.api [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3273929, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.866284} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.028366] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34/dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1062.028588] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1062.028858] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5c2b0920-8984-4cb3-b54c-e8e64bf06d92 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.035061] env[69328]: DEBUG oslo_vmware.api [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1062.035061] env[69328]: value = "task-3273936" [ 1062.035061] env[69328]: _type = "Task" [ 1062.035061] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.039292] env[69328]: DEBUG nova.network.neutron [req-f71c1696-10a6-47db-ab74-27c55f69f2fc req-4e54af6c-3506-43f0-ad2c-121818cca201 service nova] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Updated VIF entry in instance network info cache for port 2febedad-c6fa-48cf-893b-6baa5b6ddcd6. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1062.039757] env[69328]: DEBUG nova.network.neutron [req-f71c1696-10a6-47db-ab74-27c55f69f2fc req-4e54af6c-3506-43f0-ad2c-121818cca201 service nova] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Updating instance_info_cache with network_info: [{"id": "95776220-5fd9-42a1-8bf9-cfb9fe49d62d", "address": "fa:16:3e:0c:f4:26", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.130", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95776220-5f", "ovs_interfaceid": "95776220-5fd9-42a1-8bf9-cfb9fe49d62d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "2febedad-c6fa-48cf-893b-6baa5b6ddcd6", "address": "fa:16:3e:d1:26:ae", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2febedad-c6", "ovs_interfaceid": "2febedad-c6fa-48cf-893b-6baa5b6ddcd6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1062.046449] env[69328]: DEBUG oslo_vmware.api [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3273936, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.067437] env[69328]: DEBUG oslo_vmware.api [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273932, 'name': PowerOffVM_Task, 'duration_secs': 0.328413} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.067779] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1062.067896] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Updating instance '204286d7-c806-48cb-85e9-b2a78571777c' progress to 17 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1062.208554] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7b163c4-7b2f-42b3-a715-9cc28e099410 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.211873] env[69328]: DEBUG nova.network.neutron [req-ed3c1366-66c0-48f5-afbf-e09b5500b519 req-a22062e4-6ed9-447e-adfa-fe21a9202f4c service nova] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Updated VIF entry in instance network info cache for port 61188e10-aa7e-4ec8-99f4-bc6a8380b3be. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1062.212299] env[69328]: DEBUG nova.network.neutron [req-ed3c1366-66c0-48f5-afbf-e09b5500b519 req-a22062e4-6ed9-447e-adfa-fe21a9202f4c service nova] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Updating instance_info_cache with network_info: [{"id": "61188e10-aa7e-4ec8-99f4-bc6a8380b3be", "address": "fa:16:3e:f9:19:ef", "network": {"id": "2e8bdd1b-0cca-4f7c-bba3-ff1750073020", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2058593014-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.217", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "088bc9e3aeb449baa0a522342d57d183", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61188e10-aa", "ovs_interfaceid": "61188e10-aa7e-4ec8-99f4-bc6a8380b3be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1062.217664] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-84abe1cf-3c03-4c9f-b99b-511de2ebbd6a tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Suspending the VM {{(pid=69328) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1062.217664] env[69328]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.SuspendVM_Task with opID=oslo.vmware-d79d1bb9-a476-4c44-9776-e89b2a42d365 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.223510] env[69328]: DEBUG oslo_vmware.api [None req-84abe1cf-3c03-4c9f-b99b-511de2ebbd6a tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for the task: (returnval){ [ 1062.223510] env[69328]: value = "task-3273937" [ 1062.223510] env[69328]: _type = "Task" [ 1062.223510] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.232633] env[69328]: DEBUG oslo_vmware.api [None req-84abe1cf-3c03-4c9f-b99b-511de2ebbd6a tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273937, 'name': SuspendVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.275812] env[69328]: DEBUG oslo_vmware.api [None req-33ab74df-e53e-4903-b7a4-7d99e7d8c17d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273933, 'name': PowerOffVM_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.319620] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9163baf9-03d1-4a5a-9933-2d2e53d64012 tempest-ServersTestJSON-1775992188 tempest-ServersTestJSON-1775992188-project-member] Lock "65fccb3f-5e0e-4140-be0a-5ba20f494d50" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.973s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1062.454522] env[69328]: DEBUG oslo_vmware.api [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273934, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.457524] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273935, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.516772] env[69328]: DEBUG oslo_vmware.api [None req-b59ab517-cfd7-4d65-808b-6c75bd5d9991 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273930, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.545093] env[69328]: DEBUG oslo_concurrency.lockutils [req-f71c1696-10a6-47db-ab74-27c55f69f2fc req-4e54af6c-3506-43f0-ad2c-121818cca201 service nova] Releasing lock "refresh_cache-dc050589-e37a-4798-9532-df4ecfab7eb1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1062.545438] env[69328]: DEBUG nova.compute.manager [req-f71c1696-10a6-47db-ab74-27c55f69f2fc req-4e54af6c-3506-43f0-ad2c-121818cca201 service nova] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Received event network-vif-deleted-14cfba2e-1458-4c09-a1bb-825784ca30af {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1062.545945] env[69328]: DEBUG oslo_vmware.api [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3273936, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.289313} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.546317] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1062.547169] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40d8ecab-e802-45d7-9962-63535e8cebec {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.570058] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34/dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1062.570359] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b373757d-e4cc-41b9-8aa8-5d5a45130c10 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.586608] env[69328]: DEBUG nova.virt.hardware [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:34:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1062.587581] 
env[69328]: DEBUG nova.virt.hardware [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1062.587581] env[69328]: DEBUG nova.virt.hardware [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1062.587581] env[69328]: DEBUG nova.virt.hardware [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1062.587581] env[69328]: DEBUG nova.virt.hardware [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1062.587581] env[69328]: DEBUG nova.virt.hardware [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1062.587875] env[69328]: DEBUG nova.virt.hardware [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1062.587875] env[69328]: DEBUG nova.virt.hardware [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1062.588171] env[69328]: DEBUG nova.virt.hardware [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1062.588171] env[69328]: DEBUG nova.virt.hardware [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1062.588365] env[69328]: DEBUG nova.virt.hardware [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1062.593468] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dbac3098-cdbf-4b79-94ff-66a782aa936f {{(pid=69328) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.610653] env[69328]: DEBUG oslo_vmware.api [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 1062.610653] env[69328]: value = "task-3273938" [ 1062.610653] env[69328]: _type = "Task" [ 1062.610653] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.612789] env[69328]: DEBUG oslo_vmware.api [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1062.612789] env[69328]: value = "task-3273939" [ 1062.612789] env[69328]: _type = "Task" [ 1062.612789] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.633517] env[69328]: DEBUG oslo_vmware.api [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273938, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.639692] env[69328]: DEBUG oslo_vmware.api [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3273939, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.715980] env[69328]: DEBUG oslo_concurrency.lockutils [req-ed3c1366-66c0-48f5-afbf-e09b5500b519 req-a22062e4-6ed9-447e-adfa-fe21a9202f4c service nova] Releasing lock "refresh_cache-071c1837-9d0b-4b69-b16e-991b300385fb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1062.734229] env[69328]: DEBUG oslo_vmware.api [None req-84abe1cf-3c03-4c9f-b99b-511de2ebbd6a tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273937, 'name': SuspendVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.774899] env[69328]: DEBUG oslo_vmware.api [None req-33ab74df-e53e-4903-b7a4-7d99e7d8c17d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273933, 'name': PowerOffVM_Task, 'duration_secs': 0.617689} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.774899] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-33ab74df-e53e-4903-b7a4-7d99e7d8c17d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1062.774899] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-33ab74df-e53e-4903-b7a4-7d99e7d8c17d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1062.776746] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f300a70d-f9db-4e77-ad96-79a6a1f674c9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.896237] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-33ab74df-e53e-4903-b7a4-7d99e7d8c17d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1062.896490] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-33ab74df-e53e-4903-b7a4-7d99e7d8c17d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1062.896668] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-33ab74df-e53e-4903-b7a4-7d99e7d8c17d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Deleting the datastore file [datastore2] 6b9757de-a274-4f4d-9b73-cc2ca92b4732 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1062.897015] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-77f3010c-f1d3-4789-a84a-34f96870ae2a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.908439] env[69328]: DEBUG oslo_vmware.api [None req-33ab74df-e53e-4903-b7a4-7d99e7d8c17d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Waiting for the task: (returnval){ [ 1062.908439] env[69328]: value = "task-3273941" [ 1062.908439] env[69328]: _type = "Task" [ 1062.908439] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.918142] env[69328]: DEBUG oslo_vmware.api [None req-33ab74df-e53e-4903-b7a4-7d99e7d8c17d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273941, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.957086] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273935, 'name': CopyVirtualDisk_Task} progress is 15%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.964084] env[69328]: DEBUG oslo_vmware.api [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273934, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.019246] env[69328]: DEBUG oslo_vmware.api [None req-b59ab517-cfd7-4d65-808b-6c75bd5d9991 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273930, 'name': ReconfigVM_Task, 'duration_secs': 2.336828} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.022189] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b59ab517-cfd7-4d65-808b-6c75bd5d9991 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Releasing lock "dc050589-e37a-4798-9532-df4ecfab7eb1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1063.022512] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b59ab517-cfd7-4d65-808b-6c75bd5d9991 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Reconfigured VM to attach interface {{(pid=69328) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1063.132846] env[69328]: DEBUG oslo_vmware.api [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3273939, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.138185] env[69328]: DEBUG oslo_vmware.api [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273938, 'name': ReconfigVM_Task, 'duration_secs': 0.34192} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.138185] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Updating instance '204286d7-c806-48cb-85e9-b2a78571777c' progress to 33 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1063.222897] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16f4927a-46bb-4166-af64-ecc7ec6f7399 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.240214] env[69328]: DEBUG oslo_vmware.api [None req-84abe1cf-3c03-4c9f-b99b-511de2ebbd6a tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273937, 'name': SuspendVM_Task} progress is 54%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.243307] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0e19a37-3e61-4f80-a8e8-ecf0ea3b625a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.286683] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5daba7f3-7a97-4f2e-b2aa-b3fec188a900 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.298515] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aee8cb1-0f2a-4970-aa32-756f6aa2ca81 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.316789] env[69328]: DEBUG nova.compute.provider_tree [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1063.427286] env[69328]: DEBUG oslo_vmware.api [None req-33ab74df-e53e-4903-b7a4-7d99e7d8c17d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273941, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.454747] env[69328]: DEBUG oslo_vmware.api [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273934, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.400355} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.458281] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 65e38a02-880b-46e2-8866-645a9fc17c7a/65e38a02-880b-46e2-8866-645a9fc17c7a.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1063.458854] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1063.459339] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273935, 'name': CopyVirtualDisk_Task} progress is 32%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.460083] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-21a2cff2-0b28-4b41-828d-a7a631922a00 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.468758] env[69328]: DEBUG oslo_vmware.api [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 1063.468758] env[69328]: value = "task-3273942" [ 1063.468758] env[69328]: _type = "Task" [ 1063.468758] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.478468] env[69328]: DEBUG oslo_vmware.api [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273942, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.528213] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b59ab517-cfd7-4d65-808b-6c75bd5d9991 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "interface-dc050589-e37a-4798-9532-df4ecfab7eb1-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 8.479s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1063.638931] env[69328]: DEBUG oslo_vmware.api [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3273939, 'name': ReconfigVM_Task, 'duration_secs': 0.903393} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.639379] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Reconfigured VM instance instance-00000066 to attach disk [datastore2] dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34/dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1063.640220] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2858013f-2ccf-45a8-a96a-50d64125e192 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.645434] env[69328]: DEBUG nova.virt.hardware [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1063.645692] env[69328]: DEBUG nova.virt.hardware [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1063.645848] env[69328]: DEBUG nova.virt.hardware [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1063.646036] env[69328]: DEBUG nova.virt.hardware [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1063.646178] env[69328]: DEBUG nova.virt.hardware [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1063.646320] env[69328]: DEBUG nova.virt.hardware [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1063.648820] env[69328]: DEBUG nova.virt.hardware [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 
tempest-DeleteServersTestJSON-1704685797-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1063.649070] env[69328]: DEBUG nova.virt.hardware [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1063.649265] env[69328]: DEBUG nova.virt.hardware [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1063.649465] env[69328]: DEBUG nova.virt.hardware [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1063.649644] env[69328]: DEBUG nova.virt.hardware [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1063.655364] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Reconfiguring VM instance instance-00000060 to detach disk 2000 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1063.657155] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1dd02e66-5ccf-4465-9784-e81c63b087ee {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.672319] env[69328]: DEBUG oslo_vmware.api [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1063.672319] env[69328]: value = "task-3273943" [ 1063.672319] env[69328]: _type = "Task" [ 1063.672319] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.685648] env[69328]: DEBUG oslo_vmware.api [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3273943, 'name': Rename_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.686083] env[69328]: DEBUG oslo_vmware.api [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 1063.686083] env[69328]: value = "task-3273944" [ 1063.686083] env[69328]: _type = "Task" [ 1063.686083] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.701919] env[69328]: DEBUG oslo_vmware.api [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273944, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.741132] env[69328]: DEBUG oslo_vmware.api [None req-84abe1cf-3c03-4c9f-b99b-511de2ebbd6a tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273937, 'name': SuspendVM_Task, 'duration_secs': 1.407924} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.741520] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-84abe1cf-3c03-4c9f-b99b-511de2ebbd6a tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Suspended the VM {{(pid=69328) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1063.741768] env[69328]: DEBUG nova.compute.manager [None req-84abe1cf-3c03-4c9f-b99b-511de2ebbd6a tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1063.742827] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98339584-2e76-4453-a441-23e2ac1ab088 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.820732] env[69328]: DEBUG nova.scheduler.client.report [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1063.923344] env[69328]: DEBUG oslo_vmware.api [None req-33ab74df-e53e-4903-b7a4-7d99e7d8c17d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273941, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.953491] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273935, 'name': CopyVirtualDisk_Task} progress is 52%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.981843] env[69328]: DEBUG oslo_vmware.api [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273942, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077862} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.982216] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1063.983156] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dfea6f8-e0ec-4d48-9578-f845b67783e4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.010494] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] 65e38a02-880b-46e2-8866-645a9fc17c7a/65e38a02-880b-46e2-8866-645a9fc17c7a.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1064.010853] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9e5b2a89-e6e5-406e-9192-d07b20789812 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.040921] env[69328]: DEBUG oslo_vmware.api [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 1064.040921] env[69328]: value = "task-3273945" [ 1064.040921] env[69328]: _type = "Task" [ 1064.040921] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.053023] env[69328]: DEBUG oslo_vmware.api [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273945, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.183254] env[69328]: DEBUG oslo_vmware.api [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3273943, 'name': Rename_Task, 'duration_secs': 0.347918} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.183517] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1064.183782] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0d0a2188-1c08-4448-b9fa-1990ff89a0c0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.192074] env[69328]: DEBUG oslo_vmware.api [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1064.192074] env[69328]: value = "task-3273946" [ 1064.192074] env[69328]: _type = "Task" [ 1064.192074] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.199270] env[69328]: DEBUG oslo_vmware.api [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273944, 'name': ReconfigVM_Task, 'duration_secs': 0.403832} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.199554] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Reconfigured VM instance instance-00000060 to detach disk 2000 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1064.203328] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4f9e763-aca8-4752-ac7e-cb98a1cb2067 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.206428] env[69328]: DEBUG oslo_vmware.api [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3273946, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.232609] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Reconfiguring VM instance instance-00000060 to attach disk [datastore2] 204286d7-c806-48cb-85e9-b2a78571777c/204286d7-c806-48cb-85e9-b2a78571777c.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1064.233015] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-11f1caa2-c70b-4fb8-924c-bce2e37be11c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.253515] env[69328]: DEBUG oslo_vmware.api [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 1064.253515] env[69328]: value = "task-3273947" [ 1064.253515] env[69328]: _type = "Task" [ 1064.253515] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.267708] env[69328]: DEBUG oslo_vmware.api [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273947, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.326874] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.548s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1064.327538] env[69328]: DEBUG nova.compute.manager [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1064.335999] env[69328]: DEBUG oslo_concurrency.lockutils [None req-94a5dc04-40f3-46a5-8303-2e27cc56aed2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.064s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1064.335999] env[69328]: DEBUG nova.objects.instance [None req-94a5dc04-40f3-46a5-8303-2e27cc56aed2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lazy-loading 'resources' on Instance uuid ac0f967d-18c8-45d8-94ca-829a1fe11451 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1064.428706] env[69328]: DEBUG oslo_vmware.api [None req-33ab74df-e53e-4903-b7a4-7d99e7d8c17d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273941, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.456390] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273935, 'name': CopyVirtualDisk_Task} progress is 71%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.555634] env[69328]: DEBUG oslo_vmware.api [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273945, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.707362] env[69328]: DEBUG oslo_vmware.api [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3273946, 'name': PowerOnVM_Task} progress is 87%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.765942] env[69328]: DEBUG oslo_vmware.api [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273947, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.838118] env[69328]: DEBUG nova.compute.utils [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1064.839649] env[69328]: DEBUG nova.compute.manager [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1064.839823] env[69328]: DEBUG nova.network.neutron [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1064.914712] env[69328]: DEBUG nova.policy [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '781cfc30588942789f4e7cda072b2f68', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0e9e98f83e974a32b0db6ce5e8442012', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 1064.928596] env[69328]: DEBUG oslo_vmware.api [None req-33ab74df-e53e-4903-b7a4-7d99e7d8c17d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273941, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.950675} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.929533] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-33ab74df-e53e-4903-b7a4-7d99e7d8c17d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1064.929717] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-33ab74df-e53e-4903-b7a4-7d99e7d8c17d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1064.929889] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-33ab74df-e53e-4903-b7a4-7d99e7d8c17d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1064.930100] env[69328]: INFO nova.compute.manager [None req-33ab74df-e53e-4903-b7a4-7d99e7d8c17d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Took 3.19 seconds to destroy the instance on the hypervisor. [ 1064.930352] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-33ab74df-e53e-4903-b7a4-7d99e7d8c17d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1064.930542] env[69328]: DEBUG nova.compute.manager [-] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1064.930631] env[69328]: DEBUG nova.network.neutron [-] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1064.958701] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273935, 'name': CopyVirtualDisk_Task} progress is 94%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.060575] env[69328]: DEBUG oslo_vmware.api [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273945, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.212420] env[69328]: DEBUG oslo_vmware.api [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3273946, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.269302] env[69328]: DEBUG oslo_vmware.api [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273947, 'name': ReconfigVM_Task, 'duration_secs': 0.86963} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.269586] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Reconfigured VM instance instance-00000060 to attach disk [datastore2] 204286d7-c806-48cb-85e9-b2a78571777c/204286d7-c806-48cb-85e9-b2a78571777c.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1065.269875] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Updating instance '204286d7-c806-48cb-85e9-b2a78571777c' progress to 50 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1065.275313] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58a2d29f-6986-459c-b450-ebbd6a8e8c87 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.285969] env[69328]: DEBUG nova.network.neutron [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Successfully created port: 4516486f-d6cd-476a-a5ad-3d3fd9191731 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1065.294301] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-539a004c-a195-4130-8e78-6b9dcfa04c5f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.324752] env[69328]: INFO nova.compute.manager [None req-f2a3fd15-3087-4441-80ac-6825cb2e8903 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Resuming [ 1065.324752] env[69328]: DEBUG nova.objects.instance [None req-f2a3fd15-3087-4441-80ac-6825cb2e8903 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Lazy-loading 'flavor' on Instance uuid a0952fdf-5570-4112-bc4d-e9f9cee1599c {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1065.329679] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba1777eb-0d23-481a-8bc8-6d67e00a3504 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.338391] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acdde0d0-435c-4be6-807a-8ddbf5e40341 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.353911] env[69328]: DEBUG nova.compute.manager [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Start building block device mappings for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1065.356940] env[69328]: DEBUG nova.compute.provider_tree [None req-94a5dc04-40f3-46a5-8303-2e27cc56aed2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1065.413785] env[69328]: DEBUG oslo_concurrency.lockutils [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "interface-dc050589-e37a-4798-9532-df4ecfab7eb1-55e62774-2eb7-4bcb-92f6-a63fc6216cda" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1065.414355] env[69328]: DEBUG oslo_concurrency.lockutils [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "interface-dc050589-e37a-4798-9532-df4ecfab7eb1-55e62774-2eb7-4bcb-92f6-a63fc6216cda" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1065.415287] env[69328]: DEBUG nova.objects.instance [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lazy-loading 'flavor' on Instance uuid dc050589-e37a-4798-9532-df4ecfab7eb1 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1065.455707] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273935, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.183875} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.455978] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/83b1e553-81a0-4dcf-a9f7-df6e5e0289ab/83b1e553-81a0-4dcf-a9f7-df6e5e0289ab.vmdk to [datastore2] 76210566-12d7-4f6a-afa1-6329e87e0f85/76210566-12d7-4f6a-afa1-6329e87e0f85.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1065.456943] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67a04105-568d-452d-9827-8ee4d94511f8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.481769] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Reconfiguring VM instance instance-0000004c to attach disk [datastore2] 76210566-12d7-4f6a-afa1-6329e87e0f85/76210566-12d7-4f6a-afa1-6329e87e0f85.vmdk or device None with type streamOptimized {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1065.481769] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9fce816d-422f-4400-b59d-ea69d8d1aefd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.502263] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 1065.502263] env[69328]: value = "task-3273948" [ 1065.502263] env[69328]: _type = "Task" [ 1065.502263] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.511504] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273948, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.556733] env[69328]: DEBUG oslo_vmware.api [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273945, 'name': ReconfigVM_Task, 'duration_secs': 1.056537} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.557178] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Reconfigured VM instance instance-00000052 to attach disk [datastore2] 65e38a02-880b-46e2-8866-645a9fc17c7a/65e38a02-880b-46e2-8866-645a9fc17c7a.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1065.558052] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fb053fec-75d9-4312-9abc-523f3b723421 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.565271] env[69328]: DEBUG oslo_vmware.api [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 1065.565271] env[69328]: value = "task-3273949" [ 1065.565271] env[69328]: _type = "Task" [ 1065.565271] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.577730] env[69328]: DEBUG oslo_vmware.api [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273949, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.648956] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1065.648956] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1065.703883] env[69328]: DEBUG oslo_vmware.api [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3273946, 'name': PowerOnVM_Task, 'duration_secs': 1.080674} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.704285] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1065.704459] env[69328]: INFO nova.compute.manager [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Took 10.66 seconds to spawn the instance on the hypervisor. 
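Editor's note: the Rename_Task / PowerOnVM_Task entries above are Nova driving vSphere through oslo.vmware's task API. The following is only a minimal sketch of that call pattern, assuming a placeholder vCenter endpoint, placeholder credentials, and a pre-resolved vm_ref; none of these values come from this log and this is not the actual Nova code path.

# Sketch of the oslo.vmware pattern behind the "Invoking VirtualMachine.PowerOnVM_Task",
# "Waiting for the task: (returnval)" and "Task: {...} progress is N%" lines above.
# Host, credentials and vm_ref are placeholders, not values taken from this log.
from oslo_vmware import api

def power_on(vm_ref):
    # Constructing the session logs into vCenter via SessionManager.Login.
    session = api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',   # placeholder endpoint/credentials
        api_retry_count=10, task_poll_interval=0.5)
    # invoke_api issues PowerOnVM_Task against the VM managed object and returns a
    # task reference (the "value = task-NNNNNNN" returnval block seen in the log).
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task polls the task until completion, emitting per-poll progress
    # lines, and raises if the task ends in error.
    session.wait_for_task(task)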
[ 1065.705024] env[69328]: DEBUG nova.compute.manager [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1065.705544] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f38b4455-8809-4f7f-b94f-e348bc029eb2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.788171] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00696caa-23a4-4a45-8544-24334ad41060 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.808193] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c33ae3c-1aa9-422d-9086-a686f45ca926 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.827291] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Updating instance '204286d7-c806-48cb-85e9-b2a78571777c' progress to 67 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1065.832994] env[69328]: DEBUG nova.network.neutron [-] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1065.863373] env[69328]: DEBUG nova.scheduler.client.report [None req-94a5dc04-40f3-46a5-8303-2e27cc56aed2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1065.895826] env[69328]: DEBUG nova.compute.manager [req-c09dbdbe-a5f7-43b6-b4f7-ccc0f282c544 req-02c4daa3-b4ef-44cb-a710-b7969b6d1432 service nova] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Received event network-vif-deleted-51df3c9d-fc9a-47c4-83a8-917ec6fedbbf {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1066.012897] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273948, 'name': ReconfigVM_Task, 'duration_secs': 0.311436} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.013225] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Reconfigured VM instance instance-0000004c to attach disk [datastore2] 76210566-12d7-4f6a-afa1-6329e87e0f85/76210566-12d7-4f6a-afa1-6329e87e0f85.vmdk or device None with type streamOptimized {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1066.014391] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'boot_index': 0, 'encryption_secret_uuid': None, 'disk_bus': None, 'device_name': '/dev/sda', 'encrypted': False, 'encryption_options': None, 'size': 0, 'encryption_format': None, 'guest_format': None, 'device_type': 'disk', 'image_id': 'a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318'}], 'ephemerals': [], 'block_device_mapping': [{'mount_device': '/dev/sdb', 'boot_index': None, 'delete_on_termination': False, 'disk_bus': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653905', 'volume_id': '68ea45a2-2443-494f-afc8-d4648ea33fa0', 'name': 'volume-68ea45a2-2443-494f-afc8-d4648ea33fa0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '76210566-12d7-4f6a-afa1-6329e87e0f85', 'attached_at': '', 'detached_at': '', 'volume_id': '68ea45a2-2443-494f-afc8-d4648ea33fa0', 'serial': '68ea45a2-2443-494f-afc8-d4648ea33fa0'}, 'guest_format': None, 'device_type': None, 'attachment_id': 'c4b7862d-df68-49ad-8a7f-7a4fb0f31ffd', 'volume_type': None}], 'swap': None} {{(pid=69328) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1066.014601] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Volume attach. 
Driver type: vmdk {{(pid=69328) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1066.014792] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653905', 'volume_id': '68ea45a2-2443-494f-afc8-d4648ea33fa0', 'name': 'volume-68ea45a2-2443-494f-afc8-d4648ea33fa0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '76210566-12d7-4f6a-afa1-6329e87e0f85', 'attached_at': '', 'detached_at': '', 'volume_id': '68ea45a2-2443-494f-afc8-d4648ea33fa0', 'serial': '68ea45a2-2443-494f-afc8-d4648ea33fa0'} {{(pid=69328) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1066.015579] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54f5f40f-8123-4530-a354-2192cd68ccc9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.032409] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96305e6c-5aeb-418b-9861-3c9c4c42f08a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.057037] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Reconfiguring VM instance instance-0000004c to attach disk [datastore2] volume-68ea45a2-2443-494f-afc8-d4648ea33fa0/volume-68ea45a2-2443-494f-afc8-d4648ea33fa0.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1066.057339] env[69328]: DEBUG nova.objects.instance [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lazy-loading 'pci_requests' on Instance uuid dc050589-e37a-4798-9532-df4ecfab7eb1 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1066.058320] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-90ac8e4f-e6a6-4d71-90c8-f09f4c48a4de {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.072457] env[69328]: DEBUG nova.objects.base [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=69328) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1066.072635] env[69328]: DEBUG nova.network.neutron [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1066.084186] env[69328]: DEBUG oslo_vmware.api [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 
tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273949, 'name': Rename_Task, 'duration_secs': 0.152197} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.085373] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1066.085681] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 1066.085681] env[69328]: value = "task-3273950" [ 1066.085681] env[69328]: _type = "Task" [ 1066.085681] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.085885] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1395d9e3-f1b1-4a49-9704-cde175dbd9a5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.095717] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273950, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.096843] env[69328]: DEBUG oslo_vmware.api [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 1066.096843] env[69328]: value = "task-3273951" [ 1066.096843] env[69328]: _type = "Task" [ 1066.096843] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.104591] env[69328]: DEBUG oslo_vmware.api [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273951, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.156364] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1066.156607] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1066.156687] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1066.156837] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1066.156982] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1066.157149] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1066.157283] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69328) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1066.157425] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager.update_available_resource {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1066.167719] env[69328]: DEBUG nova.policy [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '69ca01fd1d0f42b0b05a5426da9753ad', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '30209bc93a4042488f15c73b7e4733d5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 1066.227285] env[69328]: INFO nova.compute.manager [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Took 33.52 seconds to build instance. 
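Editor's note: the run of "Running periodic task ComputeManager._*" entries above comes from oslo.service's periodic task machinery. Below is a minimal illustrative sketch of that mechanism only; ExampleManager, _example_poll and the 60-second spacing are invented for the example and are not Nova code.

# Sketch of the oslo.service periodic-task pattern that produces the
# "Running periodic task <Class>.<method>" DEBUG lines above.
from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF

class ExampleManager(periodic_task.PeriodicTasks):
    """Hypothetical manager used only to illustrate the decorator pattern."""

    def __init__(self):
        super().__init__(CONF)

    @periodic_task.periodic_task(spacing=60)   # invented interval for the example
    def _example_poll(self, context):
        # Each decorated method is collected by PeriodicTasks; run_periodic_tasks()
        # logs "Running periodic task ..." before invoking it, which is what the
        # entries above show for the real ComputeManager.
        pass

# Normally a looping timer drives this; a one-shot call is enough for illustration.
ExampleManager().run_periodic_tasks(context=None)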
[ 1066.341229] env[69328]: INFO nova.compute.manager [-] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Took 1.41 seconds to deallocate network for instance. [ 1066.368531] env[69328]: DEBUG nova.compute.manager [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1066.372342] env[69328]: DEBUG oslo_concurrency.lockutils [None req-94a5dc04-40f3-46a5-8303-2e27cc56aed2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.037s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1066.374702] env[69328]: DEBUG oslo_concurrency.lockutils [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 13.788s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1066.374804] env[69328]: DEBUG nova.objects.instance [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69328) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1066.401028] env[69328]: DEBUG nova.virt.hardware [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1066.401028] env[69328]: DEBUG nova.virt.hardware [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1066.401028] env[69328]: DEBUG nova.virt.hardware [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1066.401028] env[69328]: DEBUG 
nova.virt.hardware [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1066.401028] env[69328]: DEBUG nova.virt.hardware [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1066.401028] env[69328]: DEBUG nova.virt.hardware [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1066.401028] env[69328]: DEBUG nova.virt.hardware [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1066.401028] env[69328]: DEBUG nova.virt.hardware [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1066.401583] env[69328]: DEBUG nova.virt.hardware [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1066.402093] env[69328]: DEBUG nova.virt.hardware [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1066.402465] env[69328]: DEBUG nova.virt.hardware [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1066.403722] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1211570d-2995-45a1-bc0c-6e80e82ac9d6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.410924] env[69328]: INFO nova.scheduler.client.report [None req-94a5dc04-40f3-46a5-8303-2e27cc56aed2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Deleted allocations for instance ac0f967d-18c8-45d8-94ca-829a1fe11451 [ 1066.421271] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79d3cd38-b4e0-4f31-94ae-27f9e56a0007 {{(pid=69328) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.151717] env[69328]: DEBUG nova.network.neutron [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Port e957681a-e4bc-4b9a-b2b7-a4783ae059b8 binding to destination host cpu-1 is already ACTIVE {{(pid=69328) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1067.156435] env[69328]: DEBUG nova.network.neutron [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Successfully updated port: 4516486f-d6cd-476a-a5ad-3d3fd9191731 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1067.156435] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1067.156435] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5c0c0845-3eb0-4548-9874-ce693246deca tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.465s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1067.156435] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f2a3fd15-3087-4441-80ac-6825cb2e8903 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Acquiring lock "refresh_cache-a0952fdf-5570-4112-bc4d-e9f9cee1599c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1067.156435] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f2a3fd15-3087-4441-80ac-6825cb2e8903 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Acquired lock "refresh_cache-a0952fdf-5570-4112-bc4d-e9f9cee1599c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1067.156435] env[69328]: DEBUG nova.network.neutron [None req-f2a3fd15-3087-4441-80ac-6825cb2e8903 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1067.157588] env[69328]: DEBUG oslo_concurrency.lockutils [None req-33ab74df-e53e-4903-b7a4-7d99e7d8c17d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1067.164765] env[69328]: DEBUG nova.compute.manager [req-b59f5893-1bd4-4da9-87a7-3c828fecdc11 req-bb1a4e6c-f229-4c7f-8b46-213c1f817ccb service nova] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Received event network-vif-plugged-4516486f-d6cd-476a-a5ad-3d3fd9191731 {{(pid=69328) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1067.164765] env[69328]: DEBUG oslo_concurrency.lockutils [req-b59f5893-1bd4-4da9-87a7-3c828fecdc11 req-bb1a4e6c-f229-4c7f-8b46-213c1f817ccb service nova] Acquiring lock "275ef1ed-8e60-4151-b548-e22e5bd8efe2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1067.166224] env[69328]: DEBUG oslo_concurrency.lockutils [req-b59f5893-1bd4-4da9-87a7-3c828fecdc11 req-bb1a4e6c-f229-4c7f-8b46-213c1f817ccb service nova] Lock "275ef1ed-8e60-4151-b548-e22e5bd8efe2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1067.166224] env[69328]: DEBUG oslo_concurrency.lockutils [req-b59f5893-1bd4-4da9-87a7-3c828fecdc11 req-bb1a4e6c-f229-4c7f-8b46-213c1f817ccb service nova] Lock "275ef1ed-8e60-4151-b548-e22e5bd8efe2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1067.166224] env[69328]: DEBUG nova.compute.manager [req-b59f5893-1bd4-4da9-87a7-3c828fecdc11 req-bb1a4e6c-f229-4c7f-8b46-213c1f817ccb service nova] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] No waiting events found dispatching network-vif-plugged-4516486f-d6cd-476a-a5ad-3d3fd9191731 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1067.166224] env[69328]: WARNING nova.compute.manager [req-b59f5893-1bd4-4da9-87a7-3c828fecdc11 req-bb1a4e6c-f229-4c7f-8b46-213c1f817ccb service nova] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Received unexpected event network-vif-plugged-4516486f-d6cd-476a-a5ad-3d3fd9191731 for instance with vm_state building and task_state spawning. 
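The nova.virt.hardware lines above reduce the flavor/image limits (65536 sockets, cores and threads each) to a single preferred VirtCPUTopology(cores=1,sockets=1,threads=1) for the 1-vCPU m1.nano flavor. An illustrative enumeration of that search space — a simplification for clarity, not Nova's actual _get_possible_cpu_topologies implementation:

```python
import itertools


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Yield (sockets, cores, threads) triples whose product equals vcpus."""
    for s, c, t in itertools.product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if s * c * t == vcpus:
            yield s, c, t


# For a 1-vCPU flavor there is exactly one candidate, matching the
# "Got 1 possible topologies" line above.
print(list(possible_topologies(1)))  # [(1, 1, 1)]
```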
[ 1067.166364] env[69328]: DEBUG oslo_concurrency.lockutils [None req-94a5dc04-40f3-46a5-8303-2e27cc56aed2 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "ac0f967d-18c8-45d8-94ca-829a1fe11451" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.491s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1067.174522] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Acquiring lock "5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1067.174896] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Lock "5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1067.198637] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273950, 'name': ReconfigVM_Task, 'duration_secs': 0.499997} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.199271] env[69328]: DEBUG oslo_vmware.api [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273951, 'name': PowerOnVM_Task, 'duration_secs': 0.540446} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.199927] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Reconfigured VM instance instance-0000004c to attach disk [datastore2] volume-68ea45a2-2443-494f-afc8-d4648ea33fa0/volume-68ea45a2-2443-494f-afc8-d4648ea33fa0.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1067.204922] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1067.205165] env[69328]: DEBUG nova.compute.manager [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1067.205758] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-343ae6f0-9c08-4a40-b881-111f6997965d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.216072] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7693761-e627-411c-9773-642cba4dbed0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.227857] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 1067.227857] env[69328]: value = "task-3273952" [ 1067.227857] env[69328]: _type = "Task" [ 1067.227857] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.236648] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273952, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.670656] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Acquiring lock "refresh_cache-275ef1ed-8e60-4151-b548-e22e5bd8efe2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1067.671336] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Acquired lock "refresh_cache-275ef1ed-8e60-4151-b548-e22e5bd8efe2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1067.671336] env[69328]: DEBUG nova.network.neutron [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1067.676979] env[69328]: DEBUG oslo_concurrency.lockutils [None req-efd75a83-efda-42a6-92e9-884c132c1864 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.302s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1067.678157] env[69328]: DEBUG nova.compute.manager [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1067.680483] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.424s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1067.681673] env[69328]: INFO nova.compute.claims [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1067.745377] env[69328]: DEBUG oslo_concurrency.lockutils [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1067.750927] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273952, 'name': ReconfigVM_Task, 'duration_secs': 0.428431} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.751717] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653905', 'volume_id': '68ea45a2-2443-494f-afc8-d4648ea33fa0', 'name': 'volume-68ea45a2-2443-494f-afc8-d4648ea33fa0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '76210566-12d7-4f6a-afa1-6329e87e0f85', 'attached_at': '', 'detached_at': '', 'volume_id': '68ea45a2-2443-494f-afc8-d4648ea33fa0', 'serial': '68ea45a2-2443-494f-afc8-d4648ea33fa0'} {{(pid=69328) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1067.753757] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d6b20fce-e4f0-447c-ba44-1936c0d571b2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.761491] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 1067.761491] env[69328]: value = "task-3273953" [ 1067.761491] env[69328]: _type = "Task" [ 1067.761491] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.768558] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273953, 'name': Rename_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.874498] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c70083ce-df8d-48c6-9063-ceb41c197062 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "a7d4893f-31d4-449d-96d5-a2a1377d8454" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1067.874770] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c70083ce-df8d-48c6-9063-ceb41c197062 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "a7d4893f-31d4-449d-96d5-a2a1377d8454" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1067.874989] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c70083ce-df8d-48c6-9063-ceb41c197062 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "a7d4893f-31d4-449d-96d5-a2a1377d8454-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1067.875527] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c70083ce-df8d-48c6-9063-ceb41c197062 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "a7d4893f-31d4-449d-96d5-a2a1377d8454-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1067.875619] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c70083ce-df8d-48c6-9063-ceb41c197062 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "a7d4893f-31d4-449d-96d5-a2a1377d8454-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1067.878392] env[69328]: INFO nova.compute.manager [None req-c70083ce-df8d-48c6-9063-ceb41c197062 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Terminating instance [ 1067.909693] env[69328]: DEBUG nova.network.neutron [None req-f2a3fd15-3087-4441-80ac-6825cb2e8903 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Updating instance_info_cache with network_info: [{"id": "c74af0b7-ebfb-4563-9208-a18235899a6c", "address": "fa:16:3e:35:bb:fc", "network": {"id": "cc75e08f-f0f3-4b52-9b40-0de73f044554", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1326858830-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1393040bf5304571ae4b66d0a4ee7b6e", "mtu": 8950, "physical_network": 
"default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "10b81051-1eb1-406b-888c-4548c470c77e", "external-id": "nsx-vlan-transportzone-207", "segmentation_id": 207, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc74af0b7-eb", "ovs_interfaceid": "c74af0b7-ebfb-4563-9208-a18235899a6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1068.157883] env[69328]: DEBUG nova.compute.manager [req-8594e28c-0025-4ec8-bb6d-666e8fd578d5 req-39f00493-2ffe-4ea1-9264-99726b78779a service nova] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Received event network-vif-plugged-55e62774-2eb7-4bcb-92f6-a63fc6216cda {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1068.158087] env[69328]: DEBUG oslo_concurrency.lockutils [req-8594e28c-0025-4ec8-bb6d-666e8fd578d5 req-39f00493-2ffe-4ea1-9264-99726b78779a service nova] Acquiring lock "dc050589-e37a-4798-9532-df4ecfab7eb1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1068.158296] env[69328]: DEBUG oslo_concurrency.lockutils [req-8594e28c-0025-4ec8-bb6d-666e8fd578d5 req-39f00493-2ffe-4ea1-9264-99726b78779a service nova] Lock "dc050589-e37a-4798-9532-df4ecfab7eb1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1068.158463] env[69328]: DEBUG oslo_concurrency.lockutils [req-8594e28c-0025-4ec8-bb6d-666e8fd578d5 req-39f00493-2ffe-4ea1-9264-99726b78779a service nova] Lock "dc050589-e37a-4798-9532-df4ecfab7eb1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1068.158631] env[69328]: DEBUG nova.compute.manager [req-8594e28c-0025-4ec8-bb6d-666e8fd578d5 req-39f00493-2ffe-4ea1-9264-99726b78779a service nova] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] No waiting events found dispatching network-vif-plugged-55e62774-2eb7-4bcb-92f6-a63fc6216cda {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1068.158793] env[69328]: WARNING nova.compute.manager [req-8594e28c-0025-4ec8-bb6d-666e8fd578d5 req-39f00493-2ffe-4ea1-9264-99726b78779a service nova] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Received unexpected event network-vif-plugged-55e62774-2eb7-4bcb-92f6-a63fc6216cda for instance with vm_state active and task_state None. 
[ 1068.195092] env[69328]: DEBUG nova.network.neutron [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Successfully updated port: 55e62774-2eb7-4bcb-92f6-a63fc6216cda {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1068.205467] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "204286d7-c806-48cb-85e9-b2a78571777c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1068.205706] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "204286d7-c806-48cb-85e9-b2a78571777c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1068.205883] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "204286d7-c806-48cb-85e9-b2a78571777c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1068.224364] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1068.234954] env[69328]: DEBUG nova.network.neutron [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1068.270210] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273953, 'name': Rename_Task, 'duration_secs': 0.162321} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.272023] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1068.272023] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e558df75-d167-400a-8f67-4b229fcac4a2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.278950] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 1068.278950] env[69328]: value = "task-3273954" [ 1068.278950] env[69328]: _type = "Task" [ 1068.278950] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.286979] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273954, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.383167] env[69328]: DEBUG nova.compute.manager [None req-c70083ce-df8d-48c6-9063-ceb41c197062 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1068.383432] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c70083ce-df8d-48c6-9063-ceb41c197062 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1068.384749] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36aa29a4-5ef7-4703-92f5-a3bbd8fa1e3f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.392467] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c70083ce-df8d-48c6-9063-ceb41c197062 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1068.393579] env[69328]: DEBUG nova.network.neutron [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Updating instance_info_cache with network_info: [{"id": "4516486f-d6cd-476a-a5ad-3d3fd9191731", "address": "fa:16:3e:3f:3a:9a", "network": {"id": "c37f7cbb-9e72-43fb-b82a-5602208856c5", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1726899944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e9e98f83e974a32b0db6ce5e8442012", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4fc0575-c8e3-4f3c-b2e1-e10ac2d0cc1a", "external-id": "nsx-vlan-transportzone-256", "segmentation_id": 256, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4516486f-d6", "ovs_interfaceid": "4516486f-d6cd-476a-a5ad-3d3fd9191731", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1068.394767] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a82c1ea3-cb66-4100-a5a0-edb992f987a8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.402111] env[69328]: DEBUG oslo_vmware.api [None req-c70083ce-df8d-48c6-9063-ceb41c197062 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1068.402111] env[69328]: value = "task-3273955" [ 1068.402111] env[69328]: _type = "Task" [ 1068.402111] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.411417] env[69328]: DEBUG oslo_vmware.api [None req-c70083ce-df8d-48c6-9063-ceb41c197062 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273955, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.411946] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f2a3fd15-3087-4441-80ac-6825cb2e8903 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Releasing lock "refresh_cache-a0952fdf-5570-4112-bc4d-e9f9cee1599c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1068.412958] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f9e40e9-cf27-4372-8e1d-62947e4d21a7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.420431] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f2a3fd15-3087-4441-80ac-6825cb2e8903 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Resuming the VM {{(pid=69328) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1068.420727] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f1069c1e-de29-40d4-8747-fabef45bda2f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.426512] env[69328]: DEBUG oslo_vmware.api [None req-f2a3fd15-3087-4441-80ac-6825cb2e8903 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for the task: (returnval){ [ 1068.426512] env[69328]: value = "task-3273956" [ 1068.426512] env[69328]: _type = "Task" [ 1068.426512] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.434487] env[69328]: DEBUG oslo_vmware.api [None req-f2a3fd15-3087-4441-80ac-6825cb2e8903 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273956, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.704357] env[69328]: DEBUG oslo_concurrency.lockutils [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "refresh_cache-dc050589-e37a-4798-9532-df4ecfab7eb1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1068.704530] env[69328]: DEBUG oslo_concurrency.lockutils [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquired lock "refresh_cache-dc050589-e37a-4798-9532-df4ecfab7eb1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1068.704709] env[69328]: DEBUG nova.network.neutron [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1068.791583] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273954, 'name': PowerOnVM_Task} progress is 94%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.898293] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Releasing lock "refresh_cache-275ef1ed-8e60-4151-b548-e22e5bd8efe2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1068.898293] env[69328]: DEBUG nova.compute.manager [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Instance network_info: |[{"id": "4516486f-d6cd-476a-a5ad-3d3fd9191731", "address": "fa:16:3e:3f:3a:9a", "network": {"id": "c37f7cbb-9e72-43fb-b82a-5602208856c5", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1726899944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e9e98f83e974a32b0db6ce5e8442012", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4fc0575-c8e3-4f3c-b2e1-e10ac2d0cc1a", "external-id": "nsx-vlan-transportzone-256", "segmentation_id": 256, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4516486f-d6", "ovs_interfaceid": "4516486f-d6cd-476a-a5ad-3d3fd9191731", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 
1068.898293] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3f:3a:9a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a4fc0575-c8e3-4f3c-b2e1-e10ac2d0cc1a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4516486f-d6cd-476a-a5ad-3d3fd9191731', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1068.905744] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Creating folder: Project (0e9e98f83e974a32b0db6ce5e8442012). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1068.908515] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e9d280cd-7768-4989-8045-442638feebbe {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.919138] env[69328]: DEBUG oslo_vmware.api [None req-c70083ce-df8d-48c6-9063-ceb41c197062 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273955, 'name': PowerOffVM_Task, 'duration_secs': 0.218769} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.919396] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c70083ce-df8d-48c6-9063-ceb41c197062 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1068.919561] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c70083ce-df8d-48c6-9063-ceb41c197062 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1068.919847] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f041b14c-a8b1-430a-b7de-2946df469778 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.924598] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Created folder: Project (0e9e98f83e974a32b0db6ce5e8442012) in parent group-v653649. [ 1068.924779] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Creating folder: Instances. Parent ref: group-v653921. 
{{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1068.925540] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3dcc6830-215b-45c2-94ff-afca8de69771 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.935499] env[69328]: DEBUG oslo_vmware.api [None req-f2a3fd15-3087-4441-80ac-6825cb2e8903 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273956, 'name': PowerOnVM_Task} progress is 93%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.939318] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Created folder: Instances in parent group-v653921. [ 1068.939567] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1068.939756] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1068.939994] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f66ddafe-2bfe-4407-b0d0-09363f494f59 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.960791] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1068.960791] env[69328]: value = "task-3273960" [ 1068.960791] env[69328]: _type = "Task" [ 1068.960791] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.969802] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273960, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.012987] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c70083ce-df8d-48c6-9063-ceb41c197062 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1069.013320] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c70083ce-df8d-48c6-9063-ceb41c197062 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1069.013417] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-c70083ce-df8d-48c6-9063-ceb41c197062 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Deleting the datastore file [datastore1] a7d4893f-31d4-449d-96d5-a2a1377d8454 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1069.013682] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-82fad0ef-74b8-4b18-9e81-433c89d7cee0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.022547] env[69328]: DEBUG oslo_vmware.api [None req-c70083ce-df8d-48c6-9063-ceb41c197062 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1069.022547] env[69328]: value = "task-3273961" [ 1069.022547] env[69328]: _type = "Task" [ 1069.022547] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.036112] env[69328]: DEBUG oslo_vmware.api [None req-c70083ce-df8d-48c6-9063-ceb41c197062 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273961, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.065520] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8fe99d8-77fa-48b1-b9a2-c47c75b9e4bf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.075015] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d799f7ac-5a85-4e0d-a33f-da19a0b8c72f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.114793] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0243458a-bc33-4806-ad13-c28ebd3955a8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.125043] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6d0b047-9599-489c-af18-a9569420b56c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.140944] env[69328]: DEBUG nova.compute.provider_tree [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1069.245687] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "refresh_cache-204286d7-c806-48cb-85e9-b2a78571777c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1069.245948] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquired lock "refresh_cache-204286d7-c806-48cb-85e9-b2a78571777c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1069.246129] env[69328]: DEBUG nova.network.neutron [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1069.253989] env[69328]: WARNING nova.network.neutron [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] aed15283-4a79-4e99-8b6c-49cf754138de already exists in list: networks containing: ['aed15283-4a79-4e99-8b6c-49cf754138de']. ignoring it [ 1069.254387] env[69328]: WARNING nova.network.neutron [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] aed15283-4a79-4e99-8b6c-49cf754138de already exists in list: networks containing: ['aed15283-4a79-4e99-8b6c-49cf754138de']. 
ignoring it [ 1069.295396] env[69328]: DEBUG oslo_vmware.api [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3273954, 'name': PowerOnVM_Task, 'duration_secs': 0.621976} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.295531] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1069.424817] env[69328]: DEBUG nova.compute.manager [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1069.425816] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f319b10-85b8-43fb-9c56-3171602c326f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.437839] env[69328]: DEBUG oslo_vmware.api [None req-f2a3fd15-3087-4441-80ac-6825cb2e8903 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273956, 'name': PowerOnVM_Task, 'duration_secs': 0.539663} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.439547] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f2a3fd15-3087-4441-80ac-6825cb2e8903 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Resumed the VM {{(pid=69328) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1069.439763] env[69328]: DEBUG nova.compute.manager [None req-f2a3fd15-3087-4441-80ac-6825cb2e8903 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1069.443353] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2c9ae38-827e-427e-a64f-213a26fde17d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.475352] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273960, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.509847] env[69328]: DEBUG nova.compute.manager [req-5c62a8ef-9c2f-418a-be5a-8957546b3315 req-84bde15c-8327-44ba-bb1d-23f6bd7bd1f8 service nova] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Received event network-changed-4516486f-d6cd-476a-a5ad-3d3fd9191731 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1069.510029] env[69328]: DEBUG nova.compute.manager [req-5c62a8ef-9c2f-418a-be5a-8957546b3315 req-84bde15c-8327-44ba-bb1d-23f6bd7bd1f8 service nova] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Refreshing instance network info cache due to event network-changed-4516486f-d6cd-476a-a5ad-3d3fd9191731. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1069.510252] env[69328]: DEBUG oslo_concurrency.lockutils [req-5c62a8ef-9c2f-418a-be5a-8957546b3315 req-84bde15c-8327-44ba-bb1d-23f6bd7bd1f8 service nova] Acquiring lock "refresh_cache-275ef1ed-8e60-4151-b548-e22e5bd8efe2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1069.510388] env[69328]: DEBUG oslo_concurrency.lockutils [req-5c62a8ef-9c2f-418a-be5a-8957546b3315 req-84bde15c-8327-44ba-bb1d-23f6bd7bd1f8 service nova] Acquired lock "refresh_cache-275ef1ed-8e60-4151-b548-e22e5bd8efe2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1069.510543] env[69328]: DEBUG nova.network.neutron [req-5c62a8ef-9c2f-418a-be5a-8957546b3315 req-84bde15c-8327-44ba-bb1d-23f6bd7bd1f8 service nova] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Refreshing network info cache for port 4516486f-d6cd-476a-a5ad-3d3fd9191731 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1069.535082] env[69328]: DEBUG oslo_vmware.api [None req-c70083ce-df8d-48c6-9063-ceb41c197062 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3273961, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.446488} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.535325] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-c70083ce-df8d-48c6-9063-ceb41c197062 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1069.535506] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c70083ce-df8d-48c6-9063-ceb41c197062 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1069.535675] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c70083ce-df8d-48c6-9063-ceb41c197062 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1069.535838] env[69328]: INFO nova.compute.manager [None req-c70083ce-df8d-48c6-9063-ceb41c197062 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1069.536082] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c70083ce-df8d-48c6-9063-ceb41c197062 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1069.536271] env[69328]: DEBUG nova.compute.manager [-] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1069.536357] env[69328]: DEBUG nova.network.neutron [-] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1069.644683] env[69328]: DEBUG nova.scheduler.client.report [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1069.956208] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d15312fc-57bc-4b29-82c2-c68318c2f431 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lock "76210566-12d7-4f6a-afa1-6329e87e0f85" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 48.856s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1069.963284] env[69328]: DEBUG 
nova.network.neutron [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Updating instance_info_cache with network_info: [{"id": "95776220-5fd9-42a1-8bf9-cfb9fe49d62d", "address": "fa:16:3e:0c:f4:26", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.130", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95776220-5f", "ovs_interfaceid": "95776220-5fd9-42a1-8bf9-cfb9fe49d62d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "2febedad-c6fa-48cf-893b-6baa5b6ddcd6", "address": "fa:16:3e:d1:26:ae", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2febedad-c6", "ovs_interfaceid": "2febedad-c6fa-48cf-893b-6baa5b6ddcd6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "55e62774-2eb7-4bcb-92f6-a63fc6216cda", "address": "fa:16:3e:44:d7:6a", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", 
"external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55e62774-2e", "ovs_interfaceid": "55e62774-2eb7-4bcb-92f6-a63fc6216cda", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1069.974216] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273960, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.047715] env[69328]: DEBUG nova.network.neutron [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Updating instance_info_cache with network_info: [{"id": "e957681a-e4bc-4b9a-b2b7-a4783ae059b8", "address": "fa:16:3e:be:0a:24", "network": {"id": "620f277d-ca49-41da-83a0-afb8c393e26e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1223326239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdc479a290524130b9d17e627a64b65a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape957681a-e4", "ovs_interfaceid": "e957681a-e4bc-4b9a-b2b7-a4783ae059b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1070.149944] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.470s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1070.150460] env[69328]: DEBUG nova.compute.manager [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1070.153266] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c83f3c34-5732-4221-98a5-17e4bc0a9442 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.123s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1070.153526] env[69328]: DEBUG nova.objects.instance [None req-c83f3c34-5732-4221-98a5-17e4bc0a9442 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lazy-loading 'resources' on Instance uuid 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1070.211611] env[69328]: DEBUG nova.network.neutron [req-5c62a8ef-9c2f-418a-be5a-8957546b3315 req-84bde15c-8327-44ba-bb1d-23f6bd7bd1f8 service nova] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Updated VIF entry in instance network info cache for port 4516486f-d6cd-476a-a5ad-3d3fd9191731. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1070.211611] env[69328]: DEBUG nova.network.neutron [req-5c62a8ef-9c2f-418a-be5a-8957546b3315 req-84bde15c-8327-44ba-bb1d-23f6bd7bd1f8 service nova] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Updating instance_info_cache with network_info: [{"id": "4516486f-d6cd-476a-a5ad-3d3fd9191731", "address": "fa:16:3e:3f:3a:9a", "network": {"id": "c37f7cbb-9e72-43fb-b82a-5602208856c5", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1726899944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e9e98f83e974a32b0db6ce5e8442012", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4fc0575-c8e3-4f3c-b2e1-e10ac2d0cc1a", "external-id": "nsx-vlan-transportzone-256", "segmentation_id": 256, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4516486f-d6", "ovs_interfaceid": "4516486f-d6cd-476a-a5ad-3d3fd9191731", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1070.325906] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2dc4421f-2143-4501-8780-44e332749e26 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquiring lock "c7321021-15ea-47f4-a8ca-1045f2966394" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1070.326359] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2dc4421f-2143-4501-8780-44e332749e26 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "c7321021-15ea-47f4-a8ca-1045f2966394" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1070.326744] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2dc4421f-2143-4501-8780-44e332749e26 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquiring lock "c7321021-15ea-47f4-a8ca-1045f2966394-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1070.326953] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2dc4421f-2143-4501-8780-44e332749e26 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "c7321021-15ea-47f4-a8ca-1045f2966394-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1070.327158] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2dc4421f-2143-4501-8780-44e332749e26 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "c7321021-15ea-47f4-a8ca-1045f2966394-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1070.329224] env[69328]: INFO nova.compute.manager [None req-2dc4421f-2143-4501-8780-44e332749e26 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Terminating instance [ 1070.371524] env[69328]: DEBUG nova.compute.manager [req-0698156f-dba0-4a1e-9ff9-a0d3fb51eca8 req-1b962072-326e-43f1-9f34-9362b602050b service nova] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Received event network-changed-55e62774-2eb7-4bcb-92f6-a63fc6216cda {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1070.371524] env[69328]: DEBUG nova.compute.manager [req-0698156f-dba0-4a1e-9ff9-a0d3fb51eca8 req-1b962072-326e-43f1-9f34-9362b602050b service nova] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Refreshing instance network info cache due to event network-changed-55e62774-2eb7-4bcb-92f6-a63fc6216cda. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1070.371652] env[69328]: DEBUG oslo_concurrency.lockutils [req-0698156f-dba0-4a1e-9ff9-a0d3fb51eca8 req-1b962072-326e-43f1-9f34-9362b602050b service nova] Acquiring lock "refresh_cache-dc050589-e37a-4798-9532-df4ecfab7eb1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1070.395733] env[69328]: DEBUG nova.network.neutron [-] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1070.468926] env[69328]: DEBUG oslo_concurrency.lockutils [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Releasing lock "refresh_cache-dc050589-e37a-4798-9532-df4ecfab7eb1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1070.469702] env[69328]: DEBUG oslo_concurrency.lockutils [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "dc050589-e37a-4798-9532-df4ecfab7eb1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1070.469906] env[69328]: DEBUG oslo_concurrency.lockutils [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquired lock "dc050589-e37a-4798-9532-df4ecfab7eb1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1070.470199] env[69328]: DEBUG oslo_concurrency.lockutils [req-0698156f-dba0-4a1e-9ff9-a0d3fb51eca8 req-1b962072-326e-43f1-9f34-9362b602050b service nova] Acquired lock "refresh_cache-dc050589-e37a-4798-9532-df4ecfab7eb1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1070.470381] env[69328]: DEBUG nova.network.neutron [req-0698156f-dba0-4a1e-9ff9-a0d3fb51eca8 req-1b962072-326e-43f1-9f34-9362b602050b service nova] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Refreshing network info cache for port 55e62774-2eb7-4bcb-92f6-a63fc6216cda {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1070.472407] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-370b3dc3-02bc-4762-9f55-4ea83667de6b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.480353] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273960, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.493229] env[69328]: DEBUG nova.virt.hardware [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1070.493468] env[69328]: DEBUG nova.virt.hardware [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1070.493625] env[69328]: DEBUG nova.virt.hardware [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1070.493806] env[69328]: DEBUG nova.virt.hardware [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1070.493955] env[69328]: DEBUG nova.virt.hardware [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1070.494169] env[69328]: DEBUG nova.virt.hardware [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1070.494417] env[69328]: DEBUG nova.virt.hardware [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1070.494582] env[69328]: DEBUG nova.virt.hardware [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1070.494750] env[69328]: DEBUG nova.virt.hardware [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Got 1 possible 
topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1070.494914] env[69328]: DEBUG nova.virt.hardware [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1070.495104] env[69328]: DEBUG nova.virt.hardware [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1070.501380] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Reconfiguring VM to attach interface {{(pid=69328) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1070.502180] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cdf4b64b-d099-493a-a882-40c93d5d9efd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.519262] env[69328]: DEBUG oslo_vmware.api [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 1070.519262] env[69328]: value = "task-3273962" [ 1070.519262] env[69328]: _type = "Task" [ 1070.519262] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.526900] env[69328]: DEBUG oslo_vmware.api [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273962, 'name': ReconfigVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.550577] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Releasing lock "refresh_cache-204286d7-c806-48cb-85e9-b2a78571777c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1070.657620] env[69328]: DEBUG nova.compute.utils [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1070.663100] env[69328]: DEBUG nova.compute.manager [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1070.663100] env[69328]: DEBUG nova.network.neutron [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1070.707508] env[69328]: DEBUG nova.policy [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '435c64c503c043a29f90396ad3b070d9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '87581f423dc64e4fb9fe1d51ebc68597', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 1070.714126] env[69328]: DEBUG oslo_concurrency.lockutils [req-5c62a8ef-9c2f-418a-be5a-8957546b3315 req-84bde15c-8327-44ba-bb1d-23f6bd7bd1f8 service nova] Releasing lock "refresh_cache-275ef1ed-8e60-4151-b548-e22e5bd8efe2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1070.714455] env[69328]: DEBUG nova.compute.manager [req-5c62a8ef-9c2f-418a-be5a-8957546b3315 req-84bde15c-8327-44ba-bb1d-23f6bd7bd1f8 service nova] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Received event network-changed-d779425b-180c-47fd-b307-e02e14f18a26 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1070.714629] env[69328]: DEBUG nova.compute.manager [req-5c62a8ef-9c2f-418a-be5a-8957546b3315 req-84bde15c-8327-44ba-bb1d-23f6bd7bd1f8 service nova] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Refreshing instance network info cache due to event network-changed-d779425b-180c-47fd-b307-e02e14f18a26. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1070.715377] env[69328]: DEBUG oslo_concurrency.lockutils [req-5c62a8ef-9c2f-418a-be5a-8957546b3315 req-84bde15c-8327-44ba-bb1d-23f6bd7bd1f8 service nova] Acquiring lock "refresh_cache-dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1070.715377] env[69328]: DEBUG oslo_concurrency.lockutils [req-5c62a8ef-9c2f-418a-be5a-8957546b3315 req-84bde15c-8327-44ba-bb1d-23f6bd7bd1f8 service nova] Acquired lock "refresh_cache-dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1070.715377] env[69328]: DEBUG nova.network.neutron [req-5c62a8ef-9c2f-418a-be5a-8957546b3315 req-84bde15c-8327-44ba-bb1d-23f6bd7bd1f8 service nova] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Refreshing network info cache for port d779425b-180c-47fd-b307-e02e14f18a26 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1070.835517] env[69328]: DEBUG nova.compute.manager [None req-2dc4421f-2143-4501-8780-44e332749e26 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1070.835859] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2dc4421f-2143-4501-8780-44e332749e26 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1070.839677] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b0e2466-6a9f-4ee9-828f-ccc2d58cd4e9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.848497] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dc4421f-2143-4501-8780-44e332749e26 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1070.848779] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-56c9e56a-ca6e-43b8-8adf-e993a6f9d08d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.855306] env[69328]: DEBUG oslo_vmware.api [None req-2dc4421f-2143-4501-8780-44e332749e26 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 1070.855306] env[69328]: value = "task-3273963" [ 1070.855306] env[69328]: _type = "Task" [ 1070.855306] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.867657] env[69328]: DEBUG oslo_vmware.api [None req-2dc4421f-2143-4501-8780-44e332749e26 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273963, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.898316] env[69328]: INFO nova.compute.manager [-] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Took 1.36 seconds to deallocate network for instance. [ 1070.980322] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273960, 'name': CreateVM_Task, 'duration_secs': 1.561251} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.980503] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1070.981427] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1070.981610] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1070.981930] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1070.982203] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e430077-bfef-434d-be8c-88c154137162 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.987776] env[69328]: DEBUG oslo_vmware.api [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Waiting for the task: (returnval){ [ 1070.987776] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c6f3f6-ac5d-77bb-31f3-421472d15062" [ 1070.987776] env[69328]: _type = "Task" [ 1070.987776] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.995911] env[69328]: DEBUG oslo_vmware.api [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c6f3f6-ac5d-77bb-31f3-421472d15062, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.005850] env[69328]: DEBUG nova.network.neutron [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Successfully created port: 23a25695-a7ad-41dd-b5a1-29ee8d22538e {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1071.026834] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f81503d3-8c9e-401a-9d63-731f40cc7311 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.033074] env[69328]: DEBUG oslo_vmware.api [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273962, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.038792] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3dcced5-07ef-45aa-bcef-3749f553df9f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.075253] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81e0cf15-5acc-4375-b2d4-e31a7ea82d1a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.086887] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69f565b2-b2d9-4232-b4c3-2cec11f8981b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.091920] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d04fe24-4fd3-40c5-afda-c338e1c099ac {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.105329] env[69328]: DEBUG nova.compute.provider_tree [None req-c83f3c34-5732-4221-98a5-17e4bc0a9442 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1071.122842] env[69328]: DEBUG nova.scheduler.client.report [None req-c83f3c34-5732-4221-98a5-17e4bc0a9442 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1071.126417] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90a307da-0fbd-4790-ad4b-e65ae7aedb73 {{(pid=69328) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.134567] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Updating instance '204286d7-c806-48cb-85e9-b2a78571777c' progress to 83 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1071.162841] env[69328]: DEBUG nova.compute.manager [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1071.346890] env[69328]: DEBUG nova.network.neutron [req-0698156f-dba0-4a1e-9ff9-a0d3fb51eca8 req-1b962072-326e-43f1-9f34-9362b602050b service nova] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Updated VIF entry in instance network info cache for port 55e62774-2eb7-4bcb-92f6-a63fc6216cda. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1071.347384] env[69328]: DEBUG nova.network.neutron [req-0698156f-dba0-4a1e-9ff9-a0d3fb51eca8 req-1b962072-326e-43f1-9f34-9362b602050b service nova] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Updating instance_info_cache with network_info: [{"id": "95776220-5fd9-42a1-8bf9-cfb9fe49d62d", "address": "fa:16:3e:0c:f4:26", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.130", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95776220-5f", "ovs_interfaceid": "95776220-5fd9-42a1-8bf9-cfb9fe49d62d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "2febedad-c6fa-48cf-893b-6baa5b6ddcd6", "address": "fa:16:3e:d1:26:ae", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2febedad-c6", "ovs_interfaceid": "2febedad-c6fa-48cf-893b-6baa5b6ddcd6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "55e62774-2eb7-4bcb-92f6-a63fc6216cda", "address": "fa:16:3e:44:d7:6a", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55e62774-2e", "ovs_interfaceid": "55e62774-2eb7-4bcb-92f6-a63fc6216cda", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1071.368787] env[69328]: DEBUG oslo_vmware.api [None req-2dc4421f-2143-4501-8780-44e332749e26 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273963, 'name': PowerOffVM_Task, 'duration_secs': 0.206213} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.369502] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dc4421f-2143-4501-8780-44e332749e26 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1071.369728] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2dc4421f-2143-4501-8780-44e332749e26 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1071.369987] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c17791fa-d6d6-4953-96cb-12e474f9004b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.407694] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c70083ce-df8d-48c6-9063-ceb41c197062 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1071.431971] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2dc4421f-2143-4501-8780-44e332749e26 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1071.432263] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2dc4421f-2143-4501-8780-44e332749e26 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1071.432491] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-2dc4421f-2143-4501-8780-44e332749e26 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Deleting the datastore file [datastore2] c7321021-15ea-47f4-a8ca-1045f2966394 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1071.432679] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c53848d1-2220-4940-934a-44e4a7a18e5c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.445276] env[69328]: DEBUG oslo_vmware.api [None req-2dc4421f-2143-4501-8780-44e332749e26 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 1071.445276] env[69328]: value = "task-3273965" [ 1071.445276] env[69328]: _type = "Task" [ 1071.445276] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.452686] env[69328]: DEBUG oslo_vmware.api [None req-2dc4421f-2143-4501-8780-44e332749e26 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273965, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.498690] env[69328]: DEBUG oslo_vmware.api [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c6f3f6-ac5d-77bb-31f3-421472d15062, 'name': SearchDatastore_Task, 'duration_secs': 0.011017} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.498988] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1071.499240] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1071.499476] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1071.499623] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1071.499795] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1071.500069] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ca38702f-62fc-45d8-874f-99cc34227b98 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.508908] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Created directory with path [datastore2] devstack-image-cache_base 
{{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1071.509163] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1071.510039] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd16b084-4cfe-48fb-b621-c3eed3be5f74 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.515933] env[69328]: DEBUG oslo_vmware.api [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Waiting for the task: (returnval){ [ 1071.515933] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52fdb095-8992-1910-290e-bec15e7bdc71" [ 1071.515933] env[69328]: _type = "Task" [ 1071.515933] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.527870] env[69328]: DEBUG oslo_vmware.api [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52fdb095-8992-1910-290e-bec15e7bdc71, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.535620] env[69328]: DEBUG oslo_vmware.api [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273962, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.587360] env[69328]: DEBUG nova.network.neutron [req-5c62a8ef-9c2f-418a-be5a-8957546b3315 req-84bde15c-8327-44ba-bb1d-23f6bd7bd1f8 service nova] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Updated VIF entry in instance network info cache for port d779425b-180c-47fd-b307-e02e14f18a26. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1071.587744] env[69328]: DEBUG nova.network.neutron [req-5c62a8ef-9c2f-418a-be5a-8957546b3315 req-84bde15c-8327-44ba-bb1d-23f6bd7bd1f8 service nova] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Updating instance_info_cache with network_info: [{"id": "d779425b-180c-47fd-b307-e02e14f18a26", "address": "fa:16:3e:47:6b:17", "network": {"id": "4031def8-0553-461f-9528-aa338b9d7b2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1834835647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f357b5a9494b4849a83aa934c5d4e26b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "357d2811-e990-4985-9f9e-b158d10d3699", "external-id": "nsx-vlan-transportzone-641", "segmentation_id": 641, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd779425b-18", "ovs_interfaceid": "d779425b-180c-47fd-b307-e02e14f18a26", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1071.631651] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c83f3c34-5732-4221-98a5-17e4bc0a9442 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.478s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1071.633855] env[69328]: DEBUG oslo_concurrency.lockutils [None req-81971890-40ec-481a-8320-72567755728d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.781s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1071.634406] env[69328]: DEBUG nova.objects.instance [None req-81971890-40ec-481a-8320-72567755728d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lazy-loading 'resources' on Instance uuid 14521ee3-d749-48b4-aeec-23c94ca2cf9f {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1071.640247] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1071.640811] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dd3b38e1-50f7-43ca-9f33-ccc06965acb3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.649875] env[69328]: DEBUG 
oslo_vmware.api [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 1071.649875] env[69328]: value = "task-3273966" [ 1071.649875] env[69328]: _type = "Task" [ 1071.649875] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.658251] env[69328]: DEBUG oslo_vmware.api [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273966, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.659908] env[69328]: INFO nova.scheduler.client.report [None req-c83f3c34-5732-4221-98a5-17e4bc0a9442 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Deleted allocations for instance 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e [ 1071.850382] env[69328]: DEBUG oslo_concurrency.lockutils [req-0698156f-dba0-4a1e-9ff9-a0d3fb51eca8 req-1b962072-326e-43f1-9f34-9362b602050b service nova] Releasing lock "refresh_cache-dc050589-e37a-4798-9532-df4ecfab7eb1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1071.850676] env[69328]: DEBUG nova.compute.manager [req-0698156f-dba0-4a1e-9ff9-a0d3fb51eca8 req-1b962072-326e-43f1-9f34-9362b602050b service nova] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Received event network-vif-deleted-cbad07b8-acca-4410-abd7-78b9b5a05849 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1071.850887] env[69328]: INFO nova.compute.manager [req-0698156f-dba0-4a1e-9ff9-a0d3fb51eca8 req-1b962072-326e-43f1-9f34-9362b602050b service nova] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Neutron deleted interface cbad07b8-acca-4410-abd7-78b9b5a05849; detaching it from the instance and deleting it from the info cache [ 1071.851087] env[69328]: DEBUG nova.network.neutron [req-0698156f-dba0-4a1e-9ff9-a0d3fb51eca8 req-1b962072-326e-43f1-9f34-9362b602050b service nova] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1071.953692] env[69328]: DEBUG oslo_vmware.api [None req-2dc4421f-2143-4501-8780-44e332749e26 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273965, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14468} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.953969] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-2dc4421f-2143-4501-8780-44e332749e26 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1071.954274] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2dc4421f-2143-4501-8780-44e332749e26 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1071.954547] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2dc4421f-2143-4501-8780-44e332749e26 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1071.954826] env[69328]: INFO nova.compute.manager [None req-2dc4421f-2143-4501-8780-44e332749e26 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1071.955215] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2dc4421f-2143-4501-8780-44e332749e26 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1071.955499] env[69328]: DEBUG nova.compute.manager [-] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1071.955642] env[69328]: DEBUG nova.network.neutron [-] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1072.028083] env[69328]: DEBUG oslo_vmware.api [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52fdb095-8992-1910-290e-bec15e7bdc71, 'name': SearchDatastore_Task, 'duration_secs': 0.009115} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.029337] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6d7ac3d-5260-42cd-be82-9cca1d98c76b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.035229] env[69328]: DEBUG oslo_vmware.api [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273962, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.038980] env[69328]: DEBUG oslo_vmware.api [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Waiting for the task: (returnval){ [ 1072.038980] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]525ee4a9-b570-c7e9-7b25-a8ce536c4b36" [ 1072.038980] env[69328]: _type = "Task" [ 1072.038980] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.046548] env[69328]: DEBUG oslo_vmware.api [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]525ee4a9-b570-c7e9-7b25-a8ce536c4b36, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.090414] env[69328]: DEBUG oslo_concurrency.lockutils [req-5c62a8ef-9c2f-418a-be5a-8957546b3315 req-84bde15c-8327-44ba-bb1d-23f6bd7bd1f8 service nova] Releasing lock "refresh_cache-dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1072.160338] env[69328]: DEBUG oslo_vmware.api [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273966, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.168581] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c83f3c34-5732-4221-98a5-17e4bc0a9442 tempest-ImagesTestJSON-1434614674 tempest-ImagesTestJSON-1434614674-project-member] Lock "6ad357d9-c35a-4fdb-8dd0-39a0617bf85e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.997s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1072.170674] env[69328]: DEBUG nova.compute.manager [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1072.198154] env[69328]: DEBUG nova.virt.hardware [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1072.198417] env[69328]: DEBUG nova.virt.hardware [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1072.198576] env[69328]: DEBUG nova.virt.hardware [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1072.198759] env[69328]: DEBUG nova.virt.hardware [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1072.198908] env[69328]: DEBUG nova.virt.hardware [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1072.199070] env[69328]: DEBUG nova.virt.hardware [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1072.199285] env[69328]: DEBUG nova.virt.hardware [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1072.199444] env[69328]: DEBUG nova.virt.hardware [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1072.199609] env[69328]: DEBUG 
nova.virt.hardware [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1072.199768] env[69328]: DEBUG nova.virt.hardware [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1072.199938] env[69328]: DEBUG nova.virt.hardware [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1072.200835] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d98ef4d-3c7d-4412-8aef-bd2de6d5cb25 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.210461] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8ff0c85-35ba-45cb-a30d-9466fb3eb0fc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.354519] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-364916c8-9021-4b4b-aa8b-fda86892929b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.366065] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-258f8c3e-a217-405c-9577-68f6327e4e49 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.403523] env[69328]: DEBUG nova.compute.manager [req-0698156f-dba0-4a1e-9ff9-a0d3fb51eca8 req-1b962072-326e-43f1-9f34-9362b602050b service nova] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Detach interface failed, port_id=cbad07b8-acca-4410-abd7-78b9b5a05849, reason: Instance a7d4893f-31d4-449d-96d5-a2a1377d8454 could not be found. 
{{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1072.483652] env[69328]: DEBUG nova.compute.manager [req-ac92c267-7ba9-4d92-9991-0b0dcd3a8b42 req-c49ee181-3917-4d53-bbe1-335297db1ed7 service nova] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Received event network-vif-deleted-b90c50eb-decb-4850-8c7e-af0b3b67eaf0 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1072.483759] env[69328]: INFO nova.compute.manager [req-ac92c267-7ba9-4d92-9991-0b0dcd3a8b42 req-c49ee181-3917-4d53-bbe1-335297db1ed7 service nova] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Neutron deleted interface b90c50eb-decb-4850-8c7e-af0b3b67eaf0; detaching it from the instance and deleting it from the info cache [ 1072.483937] env[69328]: DEBUG nova.network.neutron [req-ac92c267-7ba9-4d92-9991-0b0dcd3a8b42 req-c49ee181-3917-4d53-bbe1-335297db1ed7 service nova] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1072.526332] env[69328]: DEBUG nova.compute.manager [req-b48c441e-d62e-46da-b01a-8b1ddba5ff44 req-b85d207d-f7d8-4a4f-8701-e10a5b47f0ab service nova] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Received event network-vif-plugged-23a25695-a7ad-41dd-b5a1-29ee8d22538e {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1072.526332] env[69328]: DEBUG oslo_concurrency.lockutils [req-b48c441e-d62e-46da-b01a-8b1ddba5ff44 req-b85d207d-f7d8-4a4f-8701-e10a5b47f0ab service nova] Acquiring lock "c1829dcf-3608-4955-bd50-eb9ee27d38e1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1072.526332] env[69328]: DEBUG oslo_concurrency.lockutils [req-b48c441e-d62e-46da-b01a-8b1ddba5ff44 req-b85d207d-f7d8-4a4f-8701-e10a5b47f0ab service nova] Lock "c1829dcf-3608-4955-bd50-eb9ee27d38e1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1072.526332] env[69328]: DEBUG oslo_concurrency.lockutils [req-b48c441e-d62e-46da-b01a-8b1ddba5ff44 req-b85d207d-f7d8-4a4f-8701-e10a5b47f0ab service nova] Lock "c1829dcf-3608-4955-bd50-eb9ee27d38e1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1072.526332] env[69328]: DEBUG nova.compute.manager [req-b48c441e-d62e-46da-b01a-8b1ddba5ff44 req-b85d207d-f7d8-4a4f-8701-e10a5b47f0ab service nova] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] No waiting events found dispatching network-vif-plugged-23a25695-a7ad-41dd-b5a1-29ee8d22538e {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1072.526332] env[69328]: WARNING nova.compute.manager [req-b48c441e-d62e-46da-b01a-8b1ddba5ff44 req-b85d207d-f7d8-4a4f-8701-e10a5b47f0ab service nova] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Received unexpected event network-vif-plugged-23a25695-a7ad-41dd-b5a1-29ee8d22538e for instance with vm_state building and task_state spawning. 
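The 'Acquiring lock ... by ...', 'acquired ... :: waited N.NNNs' and '"released" ... :: held N.NNNs' triplets above (lockutils.py:405/410/424, function "inner") come from oslo.concurrency's synchronized wrapper, while the plain Acquiring/Acquired/Releasing lock lines (lockutils.py:313/316/334, function "lock") come from its lock() context manager. A minimal Python sketch of both patterns follows; the lock names and function bodies are illustrative placeholders, not Nova's actual code.

    from oslo_concurrency import lockutils

    # Decorator form: the wrapper logs 'Lock "<name>" acquired by "<function>" :: waited ...'
    # before calling the wrapped function and '"released" ... :: held ...' after it returns.
    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass  # critical section -- illustrative placeholder

    # Context-manager form: logs the plain Acquiring/Acquired/Releasing lock lines.
    def refresh_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # rebuild the instance network info cache -- illustrative placeholder

The waited/held durations reported in the entries above (for example "waited 0.000s", "held 15.997s") are measured by that wrapper around the decorated callable.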
[ 1072.536135] env[69328]: DEBUG oslo_vmware.api [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273962, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.536993] env[69328]: DEBUG nova.network.neutron [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Successfully updated port: 23a25695-a7ad-41dd-b5a1-29ee8d22538e {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1072.544624] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cad9fc95-e21b-4b0f-bc9d-c2dfd2094ab9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.555884] env[69328]: DEBUG oslo_vmware.api [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]525ee4a9-b570-c7e9-7b25-a8ce536c4b36, 'name': SearchDatastore_Task, 'duration_secs': 0.028657} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.557869] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1072.558174] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 275ef1ed-8e60-4151-b548-e22e5bd8efe2/275ef1ed-8e60-4151-b548-e22e5bd8efe2.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1072.558489] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a9852b1a-61c7-4ace-9457-02ee78e2ea4c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.561374] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a30473b-a7be-4df1-96f9-9ad32ba31cf9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.596300] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59c86b00-5eed-42fc-bf6b-658a70c8b4f1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.599101] env[69328]: DEBUG oslo_vmware.api [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Waiting for the task: (returnval){ [ 
1072.599101] env[69328]: value = "task-3273967" [ 1072.599101] env[69328]: _type = "Task" [ 1072.599101] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.606392] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11175e53-5788-475c-8d12-4cc723bca7f3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.613546] env[69328]: DEBUG oslo_vmware.api [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3273967, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.625555] env[69328]: DEBUG nova.compute.provider_tree [None req-81971890-40ec-481a-8320-72567755728d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1072.665648] env[69328]: DEBUG oslo_vmware.api [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3273966, 'name': PowerOnVM_Task, 'duration_secs': 0.720821} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.666021] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1072.666460] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d671cf12-77fa-4fbb-9157-dc05c8da639e tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Updating instance '204286d7-c806-48cb-85e9-b2a78571777c' progress to 100 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1072.766183] env[69328]: DEBUG nova.network.neutron [-] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1072.986407] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4b50ae0d-2471-442c-81e7-8890b4221ca7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.998232] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a19c072-225e-4656-9cc2-3b6d1f102793 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.043028] env[69328]: DEBUG nova.compute.manager [req-ac92c267-7ba9-4d92-9991-0b0dcd3a8b42 req-c49ee181-3917-4d53-bbe1-335297db1ed7 service nova] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Detach interface failed, port_id=b90c50eb-decb-4850-8c7e-af0b3b67eaf0, reason: Instance c7321021-15ea-47f4-a8ca-1045f2966394 
could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1073.044019] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "refresh_cache-c1829dcf-3608-4955-bd50-eb9ee27d38e1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1073.044172] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquired lock "refresh_cache-c1829dcf-3608-4955-bd50-eb9ee27d38e1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1073.044321] env[69328]: DEBUG nova.network.neutron [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1073.050043] env[69328]: DEBUG oslo_vmware.api [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273962, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.115358] env[69328]: DEBUG oslo_vmware.api [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3273967, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.521551} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.115707] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 275ef1ed-8e60-4151-b548-e22e5bd8efe2/275ef1ed-8e60-4151-b548-e22e5bd8efe2.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1073.116018] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1073.116380] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-30861310-fdaf-4dc8-b9c1-701f575784af {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.125608] env[69328]: DEBUG oslo_vmware.api [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Waiting for the task: (returnval){ [ 1073.125608] env[69328]: value = "task-3273968" [ 1073.125608] env[69328]: _type = "Task" [ 1073.125608] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.133109] env[69328]: DEBUG nova.scheduler.client.report [None req-81971890-40ec-481a-8320-72567755728d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1073.140602] env[69328]: DEBUG oslo_vmware.api [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3273968, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.268766] env[69328]: INFO nova.compute.manager [-] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Took 1.31 seconds to deallocate network for instance. [ 1073.533955] env[69328]: DEBUG oslo_vmware.api [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273962, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.589468] env[69328]: DEBUG nova.network.neutron [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1073.636159] env[69328]: DEBUG oslo_vmware.api [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3273968, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077282} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.638983] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1073.639982] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15a23808-c0b8-4a6a-96d0-42140e2606ba {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.643979] env[69328]: DEBUG oslo_concurrency.lockutils [None req-81971890-40ec-481a-8320-72567755728d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.010s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1073.645795] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 6.491s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1073.646014] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1073.646155] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69328) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1073.646461] env[69328]: DEBUG oslo_concurrency.lockutils [None req-33ab74df-e53e-4903-b7a4-7d99e7d8c17d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.489s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1073.646983] env[69328]: DEBUG nova.objects.instance [None req-33ab74df-e53e-4903-b7a4-7d99e7d8c17d 
tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Lazy-loading 'resources' on Instance uuid 6b9757de-a274-4f4d-9b73-cc2ca92b4732 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1073.649115] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-920cced2-4db6-4eec-ab7d-347a1331fb39 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.677307] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] 275ef1ed-8e60-4151-b548-e22e5bd8efe2/275ef1ed-8e60-4151-b548-e22e5bd8efe2.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1073.681029] env[69328]: INFO nova.scheduler.client.report [None req-81971890-40ec-481a-8320-72567755728d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Deleted allocations for instance 14521ee3-d749-48b4-aeec-23c94ca2cf9f [ 1073.687888] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c967d0e6-bde4-40be-8da9-a03619af466c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.706693] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99fed4dd-5068-4348-8d7f-fde309f7969f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.724067] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad6d7d45-edf8-45fa-93e5-1b4e126bf26c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.726799] env[69328]: DEBUG oslo_vmware.api [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Waiting for the task: (returnval){ [ 1073.726799] env[69328]: value = "task-3273969" [ 1073.726799] env[69328]: _type = "Task" [ 1073.726799] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.733700] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24c9a18c-15aa-4469-b7a9-2bd590c008a0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.739981] env[69328]: DEBUG oslo_vmware.api [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3273969, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.776060] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2dc4421f-2143-4501-8780-44e332749e26 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1073.776421] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178258MB free_disk=115GB free_vcpus=48 pci_devices=None {{(pid=69328) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1073.776762] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1073.790203] env[69328]: DEBUG nova.network.neutron [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Updating instance_info_cache with network_info: [{"id": "23a25695-a7ad-41dd-b5a1-29ee8d22538e", "address": "fa:16:3e:4f:8b:c0", "network": {"id": "ce3bec03-01f9-4ac9-80e4-bfb944c0bb66", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-545997027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87581f423dc64e4fb9fe1d51ebc68597", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23a25695-a7", "ovs_interfaceid": "23a25695-a7ad-41dd-b5a1-29ee8d22538e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1074.035054] env[69328]: DEBUG oslo_vmware.api [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273962, 'name': ReconfigVM_Task, 'duration_secs': 3.373135} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.035054] env[69328]: DEBUG oslo_concurrency.lockutils [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Releasing lock "dc050589-e37a-4798-9532-df4ecfab7eb1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1074.035316] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Reconfigured VM to attach interface {{(pid=69328) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1074.215979] env[69328]: DEBUG oslo_concurrency.lockutils [None req-81971890-40ec-481a-8320-72567755728d tempest-ServerDiskConfigTestJSON-214284543 tempest-ServerDiskConfigTestJSON-214284543-project-member] Lock "14521ee3-d749-48b4-aeec-23c94ca2cf9f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.883s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1074.244552] env[69328]: DEBUG oslo_vmware.api [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3273969, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.292080] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Releasing lock "refresh_cache-c1829dcf-3608-4955-bd50-eb9ee27d38e1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1074.292687] env[69328]: DEBUG nova.compute.manager [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Instance network_info: |[{"id": "23a25695-a7ad-41dd-b5a1-29ee8d22538e", "address": "fa:16:3e:4f:8b:c0", "network": {"id": "ce3bec03-01f9-4ac9-80e4-bfb944c0bb66", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-545997027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87581f423dc64e4fb9fe1d51ebc68597", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23a25695-a7", "ovs_interfaceid": "23a25695-a7ad-41dd-b5a1-29ee8d22538e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1074.297992] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4f:8b:c0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1768af3d-3317-4ef5-b484-0c2707d63de7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '23a25695-a7ad-41dd-b5a1-29ee8d22538e', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1074.308033] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1074.308880] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1074.309317] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fcdc7f85-8e8c-4daa-b487-ae9c1194f9fd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.339937] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1074.339937] env[69328]: value = "task-3273970" [ 1074.339937] env[69328]: _type = "Task" [ 1074.339937] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.351939] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273970, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.540762] env[69328]: DEBUG oslo_concurrency.lockutils [None req-45ccd5e6-2000-49b0-9ef5-5f1f41d2adf6 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "interface-dc050589-e37a-4798-9532-df4ecfab7eb1-55e62774-2eb7-4bcb-92f6-a63fc6216cda" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 9.127s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1074.547379] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93fed92f-76bf-42da-b39d-dce8748f51c3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.557283] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac88d55f-e368-4321-9153-89bca3ac9978 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.563859] env[69328]: DEBUG nova.compute.manager [req-1a03dd78-4ec6-4346-aea9-458d664be91d req-22a85701-733c-4034-a6fe-cfbac7e601db service nova] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Received event network-changed-23a25695-a7ad-41dd-b5a1-29ee8d22538e {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1074.563963] env[69328]: DEBUG nova.compute.manager [req-1a03dd78-4ec6-4346-aea9-458d664be91d req-22a85701-733c-4034-a6fe-cfbac7e601db service nova] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Refreshing instance network info cache due to event network-changed-23a25695-a7ad-41dd-b5a1-29ee8d22538e. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1074.564260] env[69328]: DEBUG oslo_concurrency.lockutils [req-1a03dd78-4ec6-4346-aea9-458d664be91d req-22a85701-733c-4034-a6fe-cfbac7e601db service nova] Acquiring lock "refresh_cache-c1829dcf-3608-4955-bd50-eb9ee27d38e1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1074.564493] env[69328]: DEBUG oslo_concurrency.lockutils [req-1a03dd78-4ec6-4346-aea9-458d664be91d req-22a85701-733c-4034-a6fe-cfbac7e601db service nova] Acquired lock "refresh_cache-c1829dcf-3608-4955-bd50-eb9ee27d38e1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1074.564704] env[69328]: DEBUG nova.network.neutron [req-1a03dd78-4ec6-4346-aea9-458d664be91d req-22a85701-733c-4034-a6fe-cfbac7e601db service nova] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Refreshing network info cache for port 23a25695-a7ad-41dd-b5a1-29ee8d22538e {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1074.599945] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa7abd74-8fcc-4c79-af22-9e1365db0fe7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.612535] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6190807c-7bf4-4f14-b476-8a097e398a11 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.630198] env[69328]: DEBUG nova.compute.provider_tree [None req-33ab74df-e53e-4903-b7a4-7d99e7d8c17d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1074.743233] env[69328]: DEBUG oslo_vmware.api [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3273969, 'name': ReconfigVM_Task, 'duration_secs': 0.860563} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.743862] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Reconfigured VM instance instance-00000067 to attach disk [datastore2] 275ef1ed-8e60-4151-b548-e22e5bd8efe2/275ef1ed-8e60-4151-b548-e22e5bd8efe2.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1074.744681] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f6e43c5a-6546-45fc-8f1d-140dd7d2043e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.752517] env[69328]: DEBUG oslo_vmware.api [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Waiting for the task: (returnval){ [ 1074.752517] env[69328]: value = "task-3273971" [ 1074.752517] env[69328]: _type = "Task" [ 1074.752517] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.763652] env[69328]: DEBUG oslo_vmware.api [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3273971, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.885470] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273970, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.979903] env[69328]: DEBUG nova.network.neutron [req-1a03dd78-4ec6-4346-aea9-458d664be91d req-22a85701-733c-4034-a6fe-cfbac7e601db service nova] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Updated VIF entry in instance network info cache for port 23a25695-a7ad-41dd-b5a1-29ee8d22538e. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1074.980376] env[69328]: DEBUG nova.network.neutron [req-1a03dd78-4ec6-4346-aea9-458d664be91d req-22a85701-733c-4034-a6fe-cfbac7e601db service nova] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Updating instance_info_cache with network_info: [{"id": "23a25695-a7ad-41dd-b5a1-29ee8d22538e", "address": "fa:16:3e:4f:8b:c0", "network": {"id": "ce3bec03-01f9-4ac9-80e4-bfb944c0bb66", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-545997027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87581f423dc64e4fb9fe1d51ebc68597", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23a25695-a7", "ovs_interfaceid": "23a25695-a7ad-41dd-b5a1-29ee8d22538e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1075.134622] env[69328]: DEBUG nova.scheduler.client.report [None req-33ab74df-e53e-4903-b7a4-7d99e7d8c17d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1075.165576] env[69328]: DEBUG oslo_concurrency.lockutils [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "204286d7-c806-48cb-85e9-b2a78571777c" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1075.165876] env[69328]: DEBUG oslo_concurrency.lockutils [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "204286d7-c806-48cb-85e9-b2a78571777c" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1075.166090] env[69328]: DEBUG nova.compute.manager [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Going to confirm migration 5 
{{(pid=69328) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1075.264550] env[69328]: DEBUG oslo_vmware.api [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3273971, 'name': Rename_Task, 'duration_secs': 0.318032} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.264827] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1075.265071] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-39206b5a-c61f-4cd6-8c1a-7edaaa82c482 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.273260] env[69328]: DEBUG oslo_vmware.api [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Waiting for the task: (returnval){ [ 1075.273260] env[69328]: value = "task-3273972" [ 1075.273260] env[69328]: _type = "Task" [ 1075.273260] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.281303] env[69328]: DEBUG oslo_vmware.api [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3273972, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.384328] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273970, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.483863] env[69328]: DEBUG oslo_concurrency.lockutils [req-1a03dd78-4ec6-4346-aea9-458d664be91d req-22a85701-733c-4034-a6fe-cfbac7e601db service nova] Releasing lock "refresh_cache-c1829dcf-3608-4955-bd50-eb9ee27d38e1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1075.638883] env[69328]: DEBUG oslo_concurrency.lockutils [None req-33ab74df-e53e-4903-b7a4-7d99e7d8c17d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.992s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1075.641276] env[69328]: DEBUG oslo_concurrency.lockutils [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 7.896s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1075.641485] env[69328]: DEBUG nova.objects.instance [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69328) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1075.686516] env[69328]: INFO nova.scheduler.client.report [None req-33ab74df-e53e-4903-b7a4-7d99e7d8c17d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Deleted allocations for instance 6b9757de-a274-4f4d-9b73-cc2ca92b4732 [ 1075.786950] env[69328]: DEBUG oslo_vmware.api [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3273972, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.814066] env[69328]: DEBUG oslo_concurrency.lockutils [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "refresh_cache-204286d7-c806-48cb-85e9-b2a78571777c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1075.814503] env[69328]: DEBUG oslo_concurrency.lockutils [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquired lock "refresh_cache-204286d7-c806-48cb-85e9-b2a78571777c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1075.814815] env[69328]: DEBUG nova.network.neutron [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1075.815137] env[69328]: DEBUG nova.objects.instance [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lazy-loading 'info_cache' on Instance uuid 204286d7-c806-48cb-85e9-b2a78571777c {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1075.883949] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273970, 'name': CreateVM_Task, 'duration_secs': 1.453173} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.884181] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1075.884869] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1075.885047] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1075.885394] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1075.885653] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15d4b526-d433-40cc-aafe-0bc59bf481ff {{(pid=69328) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.891759] env[69328]: DEBUG oslo_vmware.api [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 1075.891759] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d15e68-7ec7-d3bf-ec50-48e7c218a189" [ 1075.891759] env[69328]: _type = "Task" [ 1075.891759] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.902108] env[69328]: DEBUG oslo_vmware.api [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d15e68-7ec7-d3bf-ec50-48e7c218a189, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.201960] env[69328]: DEBUG oslo_concurrency.lockutils [None req-33ab74df-e53e-4903-b7a4-7d99e7d8c17d tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Lock "6b9757de-a274-4f4d-9b73-cc2ca92b4732" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.965s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1076.284968] env[69328]: DEBUG oslo_vmware.api [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3273972, 'name': PowerOnVM_Task, 'duration_secs': 0.821166} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.285330] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1076.285563] env[69328]: INFO nova.compute.manager [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Took 9.92 seconds to spawn the instance on the hypervisor. 
[ 1076.285746] env[69328]: DEBUG nova.compute.manager [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1076.286530] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0f90176-2889-4d47-b544-f1eb7921d2dd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.405081] env[69328]: DEBUG oslo_vmware.api [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d15e68-7ec7-d3bf-ec50-48e7c218a189, 'name': SearchDatastore_Task, 'duration_secs': 0.011392} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.405484] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1076.405641] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1076.405882] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1076.406052] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1076.406282] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1076.406556] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-57e5d935-f11c-456b-8010-83592d7ba50a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.417368] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 
tempest-ServerActionsTestOtherA-469847991-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1076.417582] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1076.418332] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e10021a1-82b3-4448-9c9c-76d1928c25ec {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.424329] env[69328]: DEBUG oslo_vmware.api [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 1076.424329] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]520101ce-b9a0-15a3-d45c-d4d58453ca34" [ 1076.424329] env[69328]: _type = "Task" [ 1076.424329] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.435088] env[69328]: DEBUG oslo_vmware.api [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]520101ce-b9a0-15a3-d45c-d4d58453ca34, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.653274] env[69328]: DEBUG oslo_concurrency.lockutils [None req-459a0d67-5751-4980-9f29-740914a11ed2 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1076.653908] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.430s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1076.655582] env[69328]: INFO nova.compute.claims [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1076.769394] env[69328]: DEBUG oslo_concurrency.lockutils [None req-78bb2e64-9e5c-475f-a0c8-9aa88090488b tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Acquiring lock "dd43adb3-b073-483a-81dd-69df7f746874" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1076.769663] env[69328]: DEBUG oslo_concurrency.lockutils [None req-78bb2e64-9e5c-475f-a0c8-9aa88090488b 
tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Lock "dd43adb3-b073-483a-81dd-69df7f746874" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1076.769879] env[69328]: DEBUG oslo_concurrency.lockutils [None req-78bb2e64-9e5c-475f-a0c8-9aa88090488b tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Acquiring lock "dd43adb3-b073-483a-81dd-69df7f746874-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1076.770080] env[69328]: DEBUG oslo_concurrency.lockutils [None req-78bb2e64-9e5c-475f-a0c8-9aa88090488b tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Lock "dd43adb3-b073-483a-81dd-69df7f746874-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1076.770258] env[69328]: DEBUG oslo_concurrency.lockutils [None req-78bb2e64-9e5c-475f-a0c8-9aa88090488b tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Lock "dd43adb3-b073-483a-81dd-69df7f746874-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1076.772605] env[69328]: INFO nova.compute.manager [None req-78bb2e64-9e5c-475f-a0c8-9aa88090488b tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Terminating instance [ 1076.805094] env[69328]: INFO nova.compute.manager [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Took 29.23 seconds to build instance. [ 1076.936961] env[69328]: DEBUG oslo_vmware.api [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]520101ce-b9a0-15a3-d45c-d4d58453ca34, 'name': SearchDatastore_Task, 'duration_secs': 0.011301} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.937816] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d6c7aff-a7e0-454e-8a1a-f52d0ba02125 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.945878] env[69328]: DEBUG oslo_vmware.api [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 1076.945878] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a3e1c2-cd60-caa0-8775-d58453561f41" [ 1076.945878] env[69328]: _type = "Task" [ 1076.945878] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.959898] env[69328]: DEBUG oslo_vmware.api [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a3e1c2-cd60-caa0-8775-d58453561f41, 'name': SearchDatastore_Task, 'duration_secs': 0.012128} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.960196] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1076.960489] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] c1829dcf-3608-4955-bd50-eb9ee27d38e1/c1829dcf-3608-4955-bd50-eb9ee27d38e1.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1076.960785] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d1729dcd-09fc-447b-beca-4e101af2d2de {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.970281] env[69328]: DEBUG oslo_vmware.api [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 1076.970281] env[69328]: value = "task-3273973" [ 1076.970281] env[69328]: _type = "Task" [ 1076.970281] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.982480] env[69328]: DEBUG oslo_vmware.api [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273973, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.119683] env[69328]: DEBUG oslo_concurrency.lockutils [None req-51c8a109-ad37-4ba7-add6-55ce64a11cde tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "interface-dc050589-e37a-4798-9532-df4ecfab7eb1-2febedad-c6fa-48cf-893b-6baa5b6ddcd6" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1077.120303] env[69328]: DEBUG oslo_concurrency.lockutils [None req-51c8a109-ad37-4ba7-add6-55ce64a11cde tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "interface-dc050589-e37a-4798-9532-df4ecfab7eb1-2febedad-c6fa-48cf-893b-6baa5b6ddcd6" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1077.155180] env[69328]: DEBUG nova.network.neutron [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Updating instance_info_cache with network_info: [{"id": "e957681a-e4bc-4b9a-b2b7-a4783ae059b8", "address": "fa:16:3e:be:0a:24", "network": {"id": "620f277d-ca49-41da-83a0-afb8c393e26e", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1223326239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdc479a290524130b9d17e627a64b65a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape957681a-e4", "ovs_interfaceid": "e957681a-e4bc-4b9a-b2b7-a4783ae059b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1077.276462] env[69328]: DEBUG nova.compute.manager [None req-78bb2e64-9e5c-475f-a0c8-9aa88090488b tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1077.276783] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-78bb2e64-9e5c-475f-a0c8-9aa88090488b tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1077.277879] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e27f2931-407e-4ab3-89b3-12bf5531666c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.287735] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-78bb2e64-9e5c-475f-a0c8-9aa88090488b tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1077.288058] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bee07b17-affe-42da-88a9-ff063f6ce4da {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.296372] env[69328]: DEBUG oslo_vmware.api [None req-78bb2e64-9e5c-475f-a0c8-9aa88090488b tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Waiting for the task: (returnval){ [ 1077.296372] env[69328]: value = "task-3273974" [ 1077.296372] env[69328]: _type = "Task" [ 1077.296372] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.307495] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ca4b717c-dce8-4e96-90e2-6a6f534405a7 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Lock "275ef1ed-8e60-4151-b548-e22e5bd8efe2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.748s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1077.307844] env[69328]: DEBUG oslo_vmware.api [None req-78bb2e64-9e5c-475f-a0c8-9aa88090488b tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273974, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.333745] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f9081d18-bbdc-4099-8e52-037a88cc3773 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Acquiring lock "a0952fdf-5570-4112-bc4d-e9f9cee1599c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1077.334115] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f9081d18-bbdc-4099-8e52-037a88cc3773 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Lock "a0952fdf-5570-4112-bc4d-e9f9cee1599c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1077.334351] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f9081d18-bbdc-4099-8e52-037a88cc3773 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Acquiring lock "a0952fdf-5570-4112-bc4d-e9f9cee1599c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1077.334547] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f9081d18-bbdc-4099-8e52-037a88cc3773 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Lock "a0952fdf-5570-4112-bc4d-e9f9cee1599c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1077.334746] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f9081d18-bbdc-4099-8e52-037a88cc3773 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Lock "a0952fdf-5570-4112-bc4d-e9f9cee1599c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1077.337200] env[69328]: INFO nova.compute.manager [None req-f9081d18-bbdc-4099-8e52-037a88cc3773 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Terminating instance [ 1077.484080] env[69328]: DEBUG oslo_vmware.api [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273973, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.504724} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.484370] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] c1829dcf-3608-4955-bd50-eb9ee27d38e1/c1829dcf-3608-4955-bd50-eb9ee27d38e1.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1077.484583] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1077.484842] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f8195b43-b15b-4ffd-a0da-9b69bece11b7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.493475] env[69328]: DEBUG oslo_vmware.api [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 1077.493475] env[69328]: value = "task-3273975" [ 1077.493475] env[69328]: _type = "Task" [ 1077.493475] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.507023] env[69328]: DEBUG oslo_vmware.api [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273975, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.625556] env[69328]: DEBUG oslo_concurrency.lockutils [None req-51c8a109-ad37-4ba7-add6-55ce64a11cde tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "dc050589-e37a-4798-9532-df4ecfab7eb1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1077.625843] env[69328]: DEBUG oslo_concurrency.lockutils [None req-51c8a109-ad37-4ba7-add6-55ce64a11cde tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquired lock "dc050589-e37a-4798-9532-df4ecfab7eb1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1077.627113] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd34dcd5-95d3-41c8-8ef1-cefe273c0c59 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.647517] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac0ea31e-5e2d-4eeb-ae74-4c2ed8c828bd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.673742] env[69328]: DEBUG oslo_concurrency.lockutils [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Releasing lock "refresh_cache-204286d7-c806-48cb-85e9-b2a78571777c" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1077.674427] env[69328]: DEBUG nova.objects.instance [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lazy-loading 'migration_context' on Instance uuid 204286d7-c806-48cb-85e9-b2a78571777c {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1077.684342] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-51c8a109-ad37-4ba7-add6-55ce64a11cde tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Reconfiguring VM to detach interface {{(pid=69328) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1077.685782] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f253c040-264a-4d73-ae55-377bd65ec62e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.709735] env[69328]: DEBUG oslo_vmware.api [None req-51c8a109-ad37-4ba7-add6-55ce64a11cde tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 1077.709735] env[69328]: value = "task-3273976" [ 1077.709735] env[69328]: _type = "Task" [ 1077.709735] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.721257] env[69328]: DEBUG oslo_vmware.api [None req-51c8a109-ad37-4ba7-add6-55ce64a11cde tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273976, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.809158] env[69328]: DEBUG oslo_vmware.api [None req-78bb2e64-9e5c-475f-a0c8-9aa88090488b tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273974, 'name': PowerOffVM_Task, 'duration_secs': 0.294544} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.809442] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-78bb2e64-9e5c-475f-a0c8-9aa88090488b tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1077.809612] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-78bb2e64-9e5c-475f-a0c8-9aa88090488b tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1077.809868] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-69613dd7-1e86-4938-83d2-3339de4793a1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.842942] env[69328]: DEBUG nova.compute.manager [None req-f9081d18-bbdc-4099-8e52-037a88cc3773 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1077.842942] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f9081d18-bbdc-4099-8e52-037a88cc3773 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1077.843785] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88a1e3ca-4fdd-4ebf-a0b9-95e29ddcc86e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.854765] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9081d18-bbdc-4099-8e52-037a88cc3773 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1077.855134] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3ad957aa-c080-491b-a80c-4b70423caf3f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.863323] env[69328]: DEBUG oslo_vmware.api [None req-f9081d18-bbdc-4099-8e52-037a88cc3773 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for the task: (returnval){ [ 1077.863323] env[69328]: value = "task-3273978" [ 1077.863323] env[69328]: _type = "Task" [ 1077.863323] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.876332] env[69328]: DEBUG oslo_vmware.api [None req-f9081d18-bbdc-4099-8e52-037a88cc3773 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273978, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.895668] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-78bb2e64-9e5c-475f-a0c8-9aa88090488b tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1077.895668] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-78bb2e64-9e5c-475f-a0c8-9aa88090488b tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1077.895668] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-78bb2e64-9e5c-475f-a0c8-9aa88090488b tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Deleting the datastore file [datastore1] dd43adb3-b073-483a-81dd-69df7f746874 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1077.895896] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b7b81c0b-355d-4f81-a78e-ae9258c0fed4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.903285] env[69328]: DEBUG oslo_vmware.api [None req-78bb2e64-9e5c-475f-a0c8-9aa88090488b tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Waiting for the task: (returnval){ [ 1077.903285] env[69328]: value = "task-3273979" [ 1077.903285] env[69328]: _type = "Task" [ 1077.903285] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.913755] env[69328]: DEBUG oslo_vmware.api [None req-78bb2e64-9e5c-475f-a0c8-9aa88090488b tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273979, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.005176] env[69328]: DEBUG oslo_vmware.api [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273975, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070371} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.005628] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1078.006318] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f5a2174-a31a-4df4-b206-4912a03e5dca {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.030255] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Reconfiguring VM instance instance-00000068 to attach disk [datastore2] c1829dcf-3608-4955-bd50-eb9ee27d38e1/c1829dcf-3608-4955-bd50-eb9ee27d38e1.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1078.033381] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9d2b464d-3f95-45a7-a2c8-4023d6d68b79 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.057069] env[69328]: DEBUG oslo_vmware.api [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 1078.057069] env[69328]: value = "task-3273980" [ 1078.057069] env[69328]: _type = "Task" [ 1078.057069] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.063493] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dab1b325-15e0-4c1c-af11-ed7c2f41bb31 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.069664] env[69328]: DEBUG oslo_vmware.api [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273980, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.074996] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e563984-2c21-4dd2-981a-36fd481a5071 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.111512] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc2895e8-a421-4f1e-9a4e-31d77f31e2dc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.120148] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59f6ae57-0673-4d15-8a27-a3110bfb4156 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.138633] env[69328]: DEBUG nova.compute.provider_tree [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1078.186117] env[69328]: DEBUG nova.objects.base [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Object Instance<204286d7-c806-48cb-85e9-b2a78571777c> lazy-loaded attributes: info_cache,migration_context {{(pid=69328) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1078.187089] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9940039-0640-4078-8452-0712f64ef363 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.209822] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6bc78808-c0f3-4c16-bcf5-26ade6e76974 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.219934] env[69328]: DEBUG oslo_vmware.api [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 1078.219934] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5259315c-51a3-e5b9-1ec6-87e51d5ff7e4" [ 1078.219934] env[69328]: _type = "Task" [ 1078.219934] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.225450] env[69328]: DEBUG oslo_vmware.api [None req-51c8a109-ad37-4ba7-add6-55ce64a11cde tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273976, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.234144] env[69328]: DEBUG oslo_vmware.api [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5259315c-51a3-e5b9-1ec6-87e51d5ff7e4, 'name': SearchDatastore_Task, 'duration_secs': 0.009079} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.234460] env[69328]: DEBUG oslo_concurrency.lockutils [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1078.262604] env[69328]: DEBUG nova.compute.manager [req-dc135b61-216b-4a07-8160-4ce6fe42f8b9 req-85e71446-25b1-4c4d-91b4-d71482cbdee5 service nova] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Received event network-changed-4516486f-d6cd-476a-a5ad-3d3fd9191731 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1078.262834] env[69328]: DEBUG nova.compute.manager [req-dc135b61-216b-4a07-8160-4ce6fe42f8b9 req-85e71446-25b1-4c4d-91b4-d71482cbdee5 service nova] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Refreshing instance network info cache due to event network-changed-4516486f-d6cd-476a-a5ad-3d3fd9191731. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1078.263303] env[69328]: DEBUG oslo_concurrency.lockutils [req-dc135b61-216b-4a07-8160-4ce6fe42f8b9 req-85e71446-25b1-4c4d-91b4-d71482cbdee5 service nova] Acquiring lock "refresh_cache-275ef1ed-8e60-4151-b548-e22e5bd8efe2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1078.263495] env[69328]: DEBUG oslo_concurrency.lockutils [req-dc135b61-216b-4a07-8160-4ce6fe42f8b9 req-85e71446-25b1-4c4d-91b4-d71482cbdee5 service nova] Acquired lock "refresh_cache-275ef1ed-8e60-4151-b548-e22e5bd8efe2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1078.263663] env[69328]: DEBUG nova.network.neutron [req-dc135b61-216b-4a07-8160-4ce6fe42f8b9 req-85e71446-25b1-4c4d-91b4-d71482cbdee5 service nova] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Refreshing network info cache for port 4516486f-d6cd-476a-a5ad-3d3fd9191731 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1078.373831] env[69328]: DEBUG oslo_vmware.api [None req-f9081d18-bbdc-4099-8e52-037a88cc3773 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273978, 'name': PowerOffVM_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.415525] env[69328]: DEBUG oslo_vmware.api [None req-78bb2e64-9e5c-475f-a0c8-9aa88090488b tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Task: {'id': task-3273979, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15616} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.415834] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-78bb2e64-9e5c-475f-a0c8-9aa88090488b tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1078.415976] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-78bb2e64-9e5c-475f-a0c8-9aa88090488b tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1078.416177] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-78bb2e64-9e5c-475f-a0c8-9aa88090488b tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1078.416378] env[69328]: INFO nova.compute.manager [None req-78bb2e64-9e5c-475f-a0c8-9aa88090488b tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1078.416835] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-78bb2e64-9e5c-475f-a0c8-9aa88090488b tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1078.417065] env[69328]: DEBUG nova.compute.manager [-] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1078.417163] env[69328]: DEBUG nova.network.neutron [-] [instance: dd43adb3-b073-483a-81dd-69df7f746874] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1078.567552] env[69328]: DEBUG oslo_vmware.api [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273980, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.644369] env[69328]: DEBUG nova.scheduler.client.report [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1078.723796] env[69328]: DEBUG oslo_vmware.api [None req-51c8a109-ad37-4ba7-add6-55ce64a11cde tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273976, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.876930] env[69328]: DEBUG oslo_vmware.api [None req-f9081d18-bbdc-4099-8e52-037a88cc3773 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273978, 'name': PowerOffVM_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.042961] env[69328]: DEBUG nova.network.neutron [req-dc135b61-216b-4a07-8160-4ce6fe42f8b9 req-85e71446-25b1-4c4d-91b4-d71482cbdee5 service nova] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Updated VIF entry in instance network info cache for port 4516486f-d6cd-476a-a5ad-3d3fd9191731. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1079.042961] env[69328]: DEBUG nova.network.neutron [req-dc135b61-216b-4a07-8160-4ce6fe42f8b9 req-85e71446-25b1-4c4d-91b4-d71482cbdee5 service nova] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Updating instance_info_cache with network_info: [{"id": "4516486f-d6cd-476a-a5ad-3d3fd9191731", "address": "fa:16:3e:3f:3a:9a", "network": {"id": "c37f7cbb-9e72-43fb-b82a-5602208856c5", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1726899944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e9e98f83e974a32b0db6ce5e8442012", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4fc0575-c8e3-4f3c-b2e1-e10ac2d0cc1a", "external-id": "nsx-vlan-transportzone-256", "segmentation_id": 256, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4516486f-d6", "ovs_interfaceid": "4516486f-d6cd-476a-a5ad-3d3fd9191731", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1079.067951] env[69328]: DEBUG oslo_vmware.api [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273980, 'name': ReconfigVM_Task, 'duration_secs': 0.823559} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.068414] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Reconfigured VM instance instance-00000068 to attach disk [datastore2] c1829dcf-3608-4955-bd50-eb9ee27d38e1/c1829dcf-3608-4955-bd50-eb9ee27d38e1.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1079.069316] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c6ba714a-cc63-4256-9b28-ecc8bcd794bd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.079196] env[69328]: DEBUG oslo_vmware.api [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 1079.079196] env[69328]: value = "task-3273981" [ 1079.079196] env[69328]: _type = "Task" [ 1079.079196] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.090356] env[69328]: DEBUG oslo_vmware.api [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273981, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.147705] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.494s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1079.148305] env[69328]: DEBUG nova.compute.manager [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1079.151931] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c70083ce-df8d-48c6-9063-ceb41c197062 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.744s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1079.152189] env[69328]: DEBUG nova.objects.instance [None req-c70083ce-df8d-48c6-9063-ceb41c197062 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lazy-loading 'resources' on Instance uuid a7d4893f-31d4-449d-96d5-a2a1377d8454 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1079.159881] env[69328]: DEBUG nova.network.neutron [-] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1079.223924] env[69328]: DEBUG oslo_vmware.api [None req-51c8a109-ad37-4ba7-add6-55ce64a11cde tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273976, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.377498] env[69328]: DEBUG oslo_vmware.api [None req-f9081d18-bbdc-4099-8e52-037a88cc3773 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273978, 'name': PowerOffVM_Task, 'duration_secs': 1.400531} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.377806] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9081d18-bbdc-4099-8e52-037a88cc3773 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1079.378674] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f9081d18-bbdc-4099-8e52-037a88cc3773 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1079.378674] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1996a686-b1b7-44bf-9c18-3e3eef2bf34d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.468373] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f9081d18-bbdc-4099-8e52-037a88cc3773 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1079.468662] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f9081d18-bbdc-4099-8e52-037a88cc3773 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1079.468853] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9081d18-bbdc-4099-8e52-037a88cc3773 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Deleting the datastore file [datastore1] a0952fdf-5570-4112-bc4d-e9f9cee1599c {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1079.469154] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0606b1ed-7887-4156-ab6f-af9960da2a73 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.478892] env[69328]: DEBUG oslo_vmware.api [None req-f9081d18-bbdc-4099-8e52-037a88cc3773 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for the task: (returnval){ [ 1079.478892] env[69328]: value = "task-3273983" [ 1079.478892] env[69328]: _type = "Task" [ 1079.478892] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.489013] env[69328]: DEBUG oslo_vmware.api [None req-f9081d18-bbdc-4099-8e52-037a88cc3773 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273983, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.545924] env[69328]: DEBUG oslo_concurrency.lockutils [req-dc135b61-216b-4a07-8160-4ce6fe42f8b9 req-85e71446-25b1-4c4d-91b4-d71482cbdee5 service nova] Releasing lock "refresh_cache-275ef1ed-8e60-4151-b548-e22e5bd8efe2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1079.591075] env[69328]: DEBUG oslo_vmware.api [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273981, 'name': Rename_Task, 'duration_secs': 0.398793} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.591075] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1079.592079] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aba79bbf-c743-4e09-924d-61db8dcdac4f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.600395] env[69328]: DEBUG oslo_vmware.api [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 1079.600395] env[69328]: value = "task-3273984" [ 1079.600395] env[69328]: _type = "Task" [ 1079.600395] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.611738] env[69328]: DEBUG oslo_vmware.api [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273984, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.657291] env[69328]: DEBUG nova.compute.utils [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1079.661756] env[69328]: DEBUG nova.compute.manager [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Not allocating networking since 'none' was specified. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1079.662570] env[69328]: INFO nova.compute.manager [-] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Took 1.25 seconds to deallocate network for instance. [ 1079.733027] env[69328]: DEBUG oslo_vmware.api [None req-51c8a109-ad37-4ba7-add6-55ce64a11cde tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273976, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.993992] env[69328]: DEBUG oslo_vmware.api [None req-f9081d18-bbdc-4099-8e52-037a88cc3773 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Task: {'id': task-3273983, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144376} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.993992] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9081d18-bbdc-4099-8e52-037a88cc3773 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1079.993992] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f9081d18-bbdc-4099-8e52-037a88cc3773 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1079.995742] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f9081d18-bbdc-4099-8e52-037a88cc3773 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1079.995742] env[69328]: INFO nova.compute.manager [None req-f9081d18-bbdc-4099-8e52-037a88cc3773 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Took 2.15 seconds to destroy the instance on the hypervisor. [ 1079.995934] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f9081d18-bbdc-4099-8e52-037a88cc3773 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1079.996113] env[69328]: DEBUG nova.compute.manager [-] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1079.996233] env[69328]: DEBUG nova.network.neutron [-] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1080.061572] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a17f8a9-6b2e-499e-8031-f74d13c8deca {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.070431] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18582c1b-a34d-4d94-927f-0cec9df4bbbe {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.118474] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efd5419f-a30a-4c33-a12d-72d26a152368 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.129797] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-917a9a28-4e7a-4c00-a1e3-0791099c8938 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.135520] env[69328]: DEBUG oslo_vmware.api [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273984, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.146035] env[69328]: DEBUG nova.compute.provider_tree [None req-c70083ce-df8d-48c6-9063-ceb41c197062 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1080.164480] env[69328]: DEBUG nova.compute.manager [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1080.175110] env[69328]: DEBUG oslo_concurrency.lockutils [None req-78bb2e64-9e5c-475f-a0c8-9aa88090488b tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1080.230310] env[69328]: DEBUG oslo_vmware.api [None req-51c8a109-ad37-4ba7-add6-55ce64a11cde tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273976, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.313800] env[69328]: DEBUG nova.compute.manager [req-1890f817-55bc-4f57-b992-fdb8897d8c95 req-e1034a54-b339-46b1-8ebc-c04b47eeeb24 service nova] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Received event network-vif-deleted-e0c14c41-b680-40a2-a769-2b4191814a41 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1080.625292] env[69328]: DEBUG oslo_vmware.api [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3273984, 'name': PowerOnVM_Task, 'duration_secs': 0.804003} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.625398] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1080.625555] env[69328]: INFO nova.compute.manager [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Took 8.45 seconds to spawn the instance on the hypervisor. [ 1080.625809] env[69328]: DEBUG nova.compute.manager [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1080.626593] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38102011-2c93-44b1-94fa-bbb3ddcc139a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.649523] env[69328]: DEBUG nova.scheduler.client.report [None req-c70083ce-df8d-48c6-9063-ceb41c197062 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1080.731513] env[69328]: DEBUG oslo_vmware.api [None req-51c8a109-ad37-4ba7-add6-55ce64a11cde tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273976, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.830879] env[69328]: DEBUG nova.network.neutron [-] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1081.146638] env[69328]: INFO nova.compute.manager [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Took 26.91 seconds to build instance. [ 1081.158825] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c70083ce-df8d-48c6-9063-ceb41c197062 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.006s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1081.164608] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2dc4421f-2143-4501-8780-44e332749e26 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.389s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1081.165567] env[69328]: DEBUG nova.objects.instance [None req-2dc4421f-2143-4501-8780-44e332749e26 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lazy-loading 'resources' on Instance uuid c7321021-15ea-47f4-a8ca-1045f2966394 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1081.178196] env[69328]: DEBUG nova.compute.manager [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1081.195434] env[69328]: INFO nova.scheduler.client.report [None req-c70083ce-df8d-48c6-9063-ceb41c197062 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Deleted allocations for instance a7d4893f-31d4-449d-96d5-a2a1377d8454 [ 1081.207596] env[69328]: DEBUG nova.virt.hardware [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1081.207844] env[69328]: DEBUG nova.virt.hardware [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1081.208575] env[69328]: DEBUG nova.virt.hardware [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1081.208848] env[69328]: DEBUG nova.virt.hardware [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1081.209008] env[69328]: DEBUG nova.virt.hardware [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1081.209173] env[69328]: DEBUG nova.virt.hardware [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1081.209392] env[69328]: DEBUG nova.virt.hardware [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1081.209598] env[69328]: DEBUG nova.virt.hardware [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 
tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1081.209793] env[69328]: DEBUG nova.virt.hardware [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1081.209959] env[69328]: DEBUG nova.virt.hardware [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1081.210151] env[69328]: DEBUG nova.virt.hardware [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1081.211109] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-916aa73c-f781-41c6-9d53-80a7549394cf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.224585] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29aee576-b776-4b04-ad93-0f39e8c5944c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.241202] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Instance VIF info [] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1081.247384] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Creating folder: Project (ef2097e3e6e74058b5d6f9f657864904). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1081.250964] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cfbb3337-4ddf-4026-8426-ba567c3b34e6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.252815] env[69328]: DEBUG oslo_vmware.api [None req-51c8a109-ad37-4ba7-add6-55ce64a11cde tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273976, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.267148] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Created folder: Project (ef2097e3e6e74058b5d6f9f657864904) in parent group-v653649. 
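Note on the CPU-topology records just above: nova.virt.hardware derives a topology for the m1.nano flavor with no flavor or image limits (logged as 0:0:0), so the maxima fall back to 65536 sockets/cores/threads, and for a single vCPU the only split whose product equals the vCPU count is 1 socket x 1 core x 1 thread. The snippet below is a minimal, self-contained sketch of that enumeration step; it is not Nova's implementation, and the `possible_topologies` helper name is ours, introduced only to illustrate the constraint logic shown in the log.

```python
from dataclasses import dataclass
from itertools import product


@dataclass(frozen=True)
class VirtCPUTopology:
    sockets: int
    cores: int
    threads: int


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate socket/core/thread splits whose product equals the vCPU count.

    Illustrative only: mirrors the idea behind the logged
    _get_possible_cpu_topologies step, not its exact code.
    """
    found = []
    for s, c, t in product(range(1, min(vcpus, max_sockets) + 1),
                           range(1, min(vcpus, max_cores) + 1),
                           range(1, min(vcpus, max_threads) + 1)):
        if s * c * t == vcpus:
            found.append(VirtCPUTopology(sockets=s, cores=c, threads=t))
    return found


# For the 1-vCPU m1.nano flavor in the log, only 1:1:1 is possible.
print(possible_topologies(1))  # [VirtCPUTopology(sockets=1, cores=1, threads=1)]
```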
[ 1081.267449] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Creating folder: Instances. Parent ref: group-v653925. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1081.267868] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f275d20f-605a-49f3-9ce8-409d9d51823c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.280620] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Created folder: Instances in parent group-v653925. [ 1081.280872] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1081.281146] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1081.281470] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c4eef369-c104-46d5-814b-caffeeaf23a3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.302129] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1081.302129] env[69328]: value = "task-3273987" [ 1081.302129] env[69328]: _type = "Task" [ 1081.302129] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.312438] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273987, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.333649] env[69328]: INFO nova.compute.manager [-] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Took 1.34 seconds to deallocate network for instance. 
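Note on the recurring "Waiting for the task ... progress is N% ... completed successfully" sequences (for example task-3273987 / CreateVM_Task just above): each one is the compute driver polling a vCenter task object until it reaches a terminal state. The sketch below shows the general shape of such a poll loop as a stand-alone illustration; `fetch_task_info` is a hypothetical callable we introduce here, and this is not the oslo.vmware `wait_for_task` implementation.

```python
import time


class TaskFailed(Exception):
    """Raised when the remote task ends in an error state."""


def wait_for_task(fetch_task_info, poll_interval=0.5, timeout=300.0):
    """Poll a remote task until it succeeds, fails, or times out.

    ``fetch_task_info`` is a hypothetical callable returning a dict like
    {'state': 'running'|'success'|'error', 'progress': int, 'error': str}.
    """
    deadline = time.monotonic() + timeout
    while True:
        info = fetch_task_info()
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            raise TaskFailed(info.get('error', 'unknown error'))
        if time.monotonic() > deadline:
            raise TimeoutError('task did not complete within %.0fs' % timeout)
        # Corresponds to the "progress is N%" lines emitted on every poll.
        print('progress is %d%%' % info.get('progress', 0))
        time.sleep(poll_interval)


# Example with a fake task that finishes on the third poll.
states = iter([{'state': 'running', 'progress': 0},
               {'state': 'running', 'progress': 66},
               {'state': 'success', 'progress': 100}])
wait_for_task(lambda: next(states), poll_interval=0.01)
```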
[ 1081.648657] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f1922ead-eaae-4521-8956-b9889af26499 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "c1829dcf-3608-4955-bd50-eb9ee27d38e1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.429s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1081.712084] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c70083ce-df8d-48c6-9063-ceb41c197062 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "a7d4893f-31d4-449d-96d5-a2a1377d8454" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.837s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1081.736886] env[69328]: DEBUG oslo_vmware.api [None req-51c8a109-ad37-4ba7-add6-55ce64a11cde tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273976, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.814524] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273987, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.841573] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f9081d18-bbdc-4099-8e52-037a88cc3773 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1081.997265] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c480776a-7f1c-473c-996a-eabb0c33581a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.008796] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cd202c5-7b60-412f-8257-4f086920d12e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.040797] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1a76b9e-60e9-4ca2-a484-eecc837fdf09 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.049024] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-919c09b4-1ee4-4998-bed8-6e3f3880cead {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.063189] env[69328]: DEBUG nova.compute.provider_tree [None req-2dc4421f-2143-4501-8780-44e332749e26 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1082.117666] env[69328]: DEBUG oslo_concurrency.lockutils [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 
tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Acquiring lock "fb2d04d8-cff6-414c-9d50-3ab61729546d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1082.117666] env[69328]: DEBUG oslo_concurrency.lockutils [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Lock "fb2d04d8-cff6-414c-9d50-3ab61729546d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1082.232081] env[69328]: DEBUG oslo_vmware.api [None req-51c8a109-ad37-4ba7-add6-55ce64a11cde tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273976, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.315467] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3273987, 'name': CreateVM_Task, 'duration_secs': 0.543699} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.315653] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1082.316118] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1082.316286] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1082.316692] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1082.316966] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-654965a3-7009-403e-a2c0-d5e5195374d0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.323177] env[69328]: DEBUG oslo_vmware.api [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Waiting for the task: (returnval){ [ 1082.323177] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52838a16-82ec-157a-9187-01fb55006402" [ 
1082.323177] env[69328]: _type = "Task" [ 1082.323177] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.331797] env[69328]: DEBUG oslo_vmware.api [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52838a16-82ec-157a-9187-01fb55006402, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.568174] env[69328]: DEBUG nova.scheduler.client.report [None req-2dc4421f-2143-4501-8780-44e332749e26 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1082.620118] env[69328]: DEBUG nova.compute.manager [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1082.731257] env[69328]: DEBUG oslo_vmware.api [None req-51c8a109-ad37-4ba7-add6-55ce64a11cde tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273976, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.834148] env[69328]: DEBUG oslo_vmware.api [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52838a16-82ec-157a-9187-01fb55006402, 'name': SearchDatastore_Task, 'duration_secs': 0.020511} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.834490] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1082.834705] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1082.834944] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1082.835115] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1082.835331] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1082.835628] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b0caec96-45da-4cf1-b7fc-0ece39d2de51 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.846059] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1082.846386] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1082.847020] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-718201c7-0e6c-4153-aab8-05cb0c6a795e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.852950] env[69328]: DEBUG oslo_vmware.api [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Waiting for the task: (returnval){ [ 1082.852950] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e83d60-a650-cba8-c910-3fe55d9cb29e" [ 1082.852950] env[69328]: _type = "Task" [ 1082.852950] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.862815] env[69328]: DEBUG oslo_vmware.api [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e83d60-a650-cba8-c910-3fe55d9cb29e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.988850] env[69328]: DEBUG nova.compute.manager [req-5dd868f5-e035-47d6-9791-0b36f7caf233 req-7b153d2f-c69a-4254-8bcc-cfdd72e61e3c service nova] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Received event network-vif-deleted-c74af0b7-ebfb-4563-9208-a18235899a6c {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1083.075390] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2dc4421f-2143-4501-8780-44e332749e26 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.911s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1083.077961] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 9.301s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1083.102330] env[69328]: INFO nova.scheduler.client.report [None req-2dc4421f-2143-4501-8780-44e332749e26 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Deleted allocations for instance c7321021-15ea-47f4-a8ca-1045f2966394 [ 1083.141137] env[69328]: DEBUG oslo_concurrency.lockutils [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1083.232232] env[69328]: DEBUG oslo_vmware.api [None req-51c8a109-ad37-4ba7-add6-55ce64a11cde tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273976, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.365782] env[69328]: DEBUG oslo_vmware.api [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e83d60-a650-cba8-c910-3fe55d9cb29e, 'name': SearchDatastore_Task, 'duration_secs': 0.011023} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.366607] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8794411b-702f-4c92-bd18-4ef962490caf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.373963] env[69328]: DEBUG oslo_vmware.api [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Waiting for the task: (returnval){ [ 1083.373963] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ce1b3f-49ad-fed4-0559-cf907e130dc5" [ 1083.373963] env[69328]: _type = "Task" [ 1083.373963] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.382957] env[69328]: DEBUG oslo_vmware.api [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ce1b3f-49ad-fed4-0559-cf907e130dc5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.594470] env[69328]: DEBUG oslo_concurrency.lockutils [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "aaa9deb3-9a52-43e3-bf9b-a53922439be2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1083.594470] env[69328]: DEBUG oslo_concurrency.lockutils [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "aaa9deb3-9a52-43e3-bf9b-a53922439be2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1083.612166] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2dc4421f-2143-4501-8780-44e332749e26 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "c7321021-15ea-47f4-a8ca-1045f2966394" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.286s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1083.741867] env[69328]: DEBUG oslo_vmware.api [None req-51c8a109-ad37-4ba7-add6-55ce64a11cde tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3273976, 'name': ReconfigVM_Task, 'duration_secs': 5.864426} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.742820] env[69328]: DEBUG oslo_concurrency.lockutils [None req-51c8a109-ad37-4ba7-add6-55ce64a11cde tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Releasing lock "dc050589-e37a-4798-9532-df4ecfab7eb1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1083.743093] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-51c8a109-ad37-4ba7-add6-55ce64a11cde tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Reconfigured VM to detach interface {{(pid=69328) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1083.893608] env[69328]: DEBUG oslo_vmware.api [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ce1b3f-49ad-fed4-0559-cf907e130dc5, 'name': SearchDatastore_Task, 'duration_secs': 0.011583} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.893979] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1083.893979] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee/5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1083.894207] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fc576d55-d657-4aca-aaf0-0b21b204263c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.905264] env[69328]: DEBUG oslo_vmware.api [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Waiting for the task: (returnval){ [ 1083.905264] env[69328]: value = "task-3273988" [ 1083.905264] env[69328]: _type = "Task" [ 1083.905264] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.916246] env[69328]: DEBUG oslo_vmware.api [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': task-3273988, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.085390] env[69328]: DEBUG oslo_concurrency.lockutils [None req-13348fc2-8660-46bd-ac11-bf21353f0390 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquiring lock "9f6f8e97-cb21-4984-af08-a63ea4578eef" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1084.085726] env[69328]: DEBUG oslo_concurrency.lockutils [None req-13348fc2-8660-46bd-ac11-bf21353f0390 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "9f6f8e97-cb21-4984-af08-a63ea4578eef" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1084.086186] env[69328]: DEBUG oslo_concurrency.lockutils [None req-13348fc2-8660-46bd-ac11-bf21353f0390 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquiring lock "9f6f8e97-cb21-4984-af08-a63ea4578eef-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1084.086334] env[69328]: DEBUG oslo_concurrency.lockutils [None req-13348fc2-8660-46bd-ac11-bf21353f0390 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "9f6f8e97-cb21-4984-af08-a63ea4578eef-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1084.086549] env[69328]: DEBUG oslo_concurrency.lockutils [None req-13348fc2-8660-46bd-ac11-bf21353f0390 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "9f6f8e97-cb21-4984-af08-a63ea4578eef-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1084.089518] env[69328]: INFO nova.compute.manager [None req-13348fc2-8660-46bd-ac11-bf21353f0390 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Terminating instance [ 1084.096265] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Applying migration context for instance 204286d7-c806-48cb-85e9-b2a78571777c as it has an incoming, in-progress migration 65843494-d4bc-40c8-866b-e1e3d3443745. 
Migration status is finished {{(pid=69328) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1084.098374] env[69328]: INFO nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Updating resource usage from migration 65843494-d4bc-40c8-866b-e1e3d3443745 [ 1084.102727] env[69328]: DEBUG nova.compute.manager [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1084.142172] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance b0a1441c-81e2-4131-a2ff-f5042d559d9f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1084.142355] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance a0b663eb-31b0-4de1-94bc-660a7d9c1c7b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1084.142504] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance f1be93b2-08db-41fe-87c4-f4e5f964cfa4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1084.142709] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance c751ef77-c3be-46cd-b7eb-fe139bf0998b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1084.142940] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 1413dcfe-3570-4657-b811-81a1acc159d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1084.143198] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 65e38a02-880b-46e2-8866-645a9fc17c7a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1084.143358] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance a95d01cf-c26b-466c-a5b6-a7e43f0321fa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1084.143602] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 52c87371-4142-40d6-ac68-804aabd9f823 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1084.143602] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 9f6f8e97-cb21-4984-af08-a63ea4578eef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1084.143708] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance dc050589-e37a-4798-9532-df4ecfab7eb1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1084.144021] env[69328]: WARNING nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance dd43adb3-b073-483a-81dd-69df7f746874 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1084.144021] env[69328]: WARNING nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance a0952fdf-5570-4112-bc4d-e9f9cee1599c is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1084.144162] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 76210566-12d7-4f6a-afa1-6329e87e0f85 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1084.144438] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 071c1837-9d0b-4b69-b16e-991b300385fb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1084.144438] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1084.144566] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Migration 65843494-d4bc-40c8-866b-e1e3d3443745 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1084.144619] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 204286d7-c806-48cb-85e9-b2a78571777c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1084.144802] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 275ef1ed-8e60-4151-b548-e22e5bd8efe2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1084.144802] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance c1829dcf-3608-4955-bd50-eb9ee27d38e1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1084.144912] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1084.423895] env[69328]: DEBUG oslo_vmware.api [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': task-3273988, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.484180] env[69328]: DEBUG nova.compute.manager [req-1ceb46ec-a6f0-4f35-83a5-4781ee4ebf58 req-7a44a74f-3c8d-407d-a982-73b0ab75d492 service nova] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Received event network-vif-deleted-55e62774-2eb7-4bcb-92f6-a63fc6216cda {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1084.485633] env[69328]: INFO nova.compute.manager [req-1ceb46ec-a6f0-4f35-83a5-4781ee4ebf58 req-7a44a74f-3c8d-407d-a982-73b0ab75d492 service nova] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Neutron deleted interface 55e62774-2eb7-4bcb-92f6-a63fc6216cda; detaching it from the instance and deleting it from the info cache [ 1084.486015] env[69328]: DEBUG nova.network.neutron [req-1ceb46ec-a6f0-4f35-83a5-4781ee4ebf58 req-7a44a74f-3c8d-407d-a982-73b0ab75d492 service nova] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Updating instance_info_cache with network_info: [{"id": "95776220-5fd9-42a1-8bf9-cfb9fe49d62d", "address": "fa:16:3e:0c:f4:26", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.130", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95776220-5f", "ovs_interfaceid": "95776220-5fd9-42a1-8bf9-cfb9fe49d62d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "2febedad-c6fa-48cf-893b-6baa5b6ddcd6", "address": "fa:16:3e:d1:26:ae", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2febedad-c6", "ovs_interfaceid": "2febedad-c6fa-48cf-893b-6baa5b6ddcd6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1084.595687] env[69328]: DEBUG nova.compute.manager [None req-13348fc2-8660-46bd-ac11-bf21353f0390 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1084.598373] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-13348fc2-8660-46bd-ac11-bf21353f0390 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1084.598373] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c499c91d-1f75-4a50-9e64-ce4c7633993b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.609538] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-13348fc2-8660-46bd-ac11-bf21353f0390 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1084.610020] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6abb6356-9fdf-48cc-ae7a-3a799ba3d98d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.617984] env[69328]: DEBUG oslo_vmware.api [None req-13348fc2-8660-46bd-ac11-bf21353f0390 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 1084.617984] env[69328]: value = "task-3273989" [ 1084.617984] env[69328]: _type = "Task" [ 1084.617984] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.630778] env[69328]: DEBUG oslo_vmware.api [None req-13348fc2-8660-46bd-ac11-bf21353f0390 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273989, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.638583] env[69328]: DEBUG oslo_concurrency.lockutils [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1084.647741] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance fb2d04d8-cff6-414c-9d50-3ab61729546d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1084.918631] env[69328]: DEBUG oslo_vmware.api [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': task-3273988, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.532334} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.918971] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee/5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1084.919261] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1084.919616] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d67902e9-4488-4454-b02a-810b6ba5e958 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.929840] env[69328]: DEBUG oslo_vmware.api [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Waiting for the task: (returnval){ [ 1084.929840] env[69328]: value = "task-3273992" [ 1084.929840] env[69328]: _type = "Task" [ 1084.929840] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.943102] env[69328]: DEBUG oslo_vmware.api [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': task-3273992, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.989568] env[69328]: DEBUG oslo_concurrency.lockutils [req-1ceb46ec-a6f0-4f35-83a5-4781ee4ebf58 req-7a44a74f-3c8d-407d-a982-73b0ab75d492 service nova] Acquiring lock "dc050589-e37a-4798-9532-df4ecfab7eb1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.990171] env[69328]: DEBUG oslo_concurrency.lockutils [req-1ceb46ec-a6f0-4f35-83a5-4781ee4ebf58 req-7a44a74f-3c8d-407d-a982-73b0ab75d492 service nova] Acquired lock "dc050589-e37a-4798-9532-df4ecfab7eb1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1084.991179] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d60a1b6c-e21d-4a39-b21f-b2b5e1fed189 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.016918] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3445b841-96c5-437a-a2fd-3bd1f57fcdd9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.046328] env[69328]: DEBUG nova.virt.vmwareapi.vmops [req-1ceb46ec-a6f0-4f35-83a5-4781ee4ebf58 req-7a44a74f-3c8d-407d-a982-73b0ab75d492 service nova] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Reconfiguring VM to detach interface {{(pid=69328) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1085.047924] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f9e8da46-5962-41ad-9ba7-1c850426bcbb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.070670] env[69328]: DEBUG nova.compute.manager [req-0fce00ee-a6f3-4359-a626-97234d79a216 req-71fa809c-4f05-4608-b1e2-3f4ccc508394 service nova] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Received event network-changed-23a25695-a7ad-41dd-b5a1-29ee8d22538e {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1085.070670] env[69328]: DEBUG nova.compute.manager [req-0fce00ee-a6f3-4359-a626-97234d79a216 req-71fa809c-4f05-4608-b1e2-3f4ccc508394 service nova] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Refreshing instance network info cache due to event network-changed-23a25695-a7ad-41dd-b5a1-29ee8d22538e. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1085.070670] env[69328]: DEBUG oslo_concurrency.lockutils [req-0fce00ee-a6f3-4359-a626-97234d79a216 req-71fa809c-4f05-4608-b1e2-3f4ccc508394 service nova] Acquiring lock "refresh_cache-c1829dcf-3608-4955-bd50-eb9ee27d38e1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1085.070670] env[69328]: DEBUG oslo_concurrency.lockutils [req-0fce00ee-a6f3-4359-a626-97234d79a216 req-71fa809c-4f05-4608-b1e2-3f4ccc508394 service nova] Acquired lock "refresh_cache-c1829dcf-3608-4955-bd50-eb9ee27d38e1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1085.070670] env[69328]: DEBUG nova.network.neutron [req-0fce00ee-a6f3-4359-a626-97234d79a216 req-71fa809c-4f05-4608-b1e2-3f4ccc508394 service nova] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Refreshing network info cache for port 23a25695-a7ad-41dd-b5a1-29ee8d22538e {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1085.070670] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0d214c25-ddc2-44fd-b498-d433b0aaa175 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "dc050589-e37a-4798-9532-df4ecfab7eb1" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1085.078097] env[69328]: DEBUG oslo_vmware.api [req-1ceb46ec-a6f0-4f35-83a5-4781ee4ebf58 req-7a44a74f-3c8d-407d-a982-73b0ab75d492 service nova] Waiting for the task: (returnval){ [ 1085.078097] env[69328]: value = "task-3273995" [ 1085.078097] env[69328]: _type = "Task" [ 1085.078097] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.078599] env[69328]: DEBUG oslo_concurrency.lockutils [None req-51c8a109-ad37-4ba7-add6-55ce64a11cde tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "refresh_cache-dc050589-e37a-4798-9532-df4ecfab7eb1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1085.078751] env[69328]: DEBUG oslo_concurrency.lockutils [None req-51c8a109-ad37-4ba7-add6-55ce64a11cde tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquired lock "refresh_cache-dc050589-e37a-4798-9532-df4ecfab7eb1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1085.078934] env[69328]: DEBUG nova.network.neutron [None req-51c8a109-ad37-4ba7-add6-55ce64a11cde tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1085.091296] env[69328]: DEBUG oslo_vmware.api [req-1ceb46ec-a6f0-4f35-83a5-4781ee4ebf58 req-7a44a74f-3c8d-407d-a982-73b0ab75d492 service nova] Task: {'id': task-3273995, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.128298] env[69328]: DEBUG oslo_vmware.api [None req-13348fc2-8660-46bd-ac11-bf21353f0390 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273989, 'name': PowerOffVM_Task, 'duration_secs': 0.217643} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.128607] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-13348fc2-8660-46bd-ac11-bf21353f0390 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1085.128778] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-13348fc2-8660-46bd-ac11-bf21353f0390 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1085.129053] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-65255e64-e397-4524-b559-a576e95756ed {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.151019] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance aaa9deb3-9a52-43e3-bf9b-a53922439be2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1085.151360] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Total usable vcpus: 48, total allocated vcpus: 18 {{(pid=69328) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1085.151472] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4032MB phys_disk=200GB used_disk=18GB total_vcpus=48 used_vcpus=18 pci_stats=[] {{(pid=69328) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1085.442145] env[69328]: DEBUG oslo_vmware.api [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': task-3273992, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077755} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.442408] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1085.443259] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2966ff8c-cf32-4e5f-85ef-6fa0bba864bc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.448975] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e89d90e-8c9e-4873-87f7-5e0756f16ac1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.466630] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee/5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1085.467346] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d971b874-0ade-4097-917d-51a37f24d736 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.485262] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-203e34f7-6f6a-4a61-aad2-48adccaa6bd0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.490146] env[69328]: DEBUG oslo_vmware.api [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Waiting for the task: (returnval){ [ 1085.490146] env[69328]: value = "task-3273997" [ 1085.490146] env[69328]: _type = "Task" [ 1085.490146] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.519557] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd80e107-826d-4e1e-9150-90d88f99b3dc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.525579] env[69328]: DEBUG oslo_vmware.api [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': task-3273997, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.531126] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d155ef1-d019-4e0f-a71d-d6335fd380ab {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.547485] env[69328]: DEBUG nova.compute.provider_tree [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1085.590822] env[69328]: DEBUG oslo_vmware.api [req-1ceb46ec-a6f0-4f35-83a5-4781ee4ebf58 req-7a44a74f-3c8d-407d-a982-73b0ab75d492 service nova] Task: {'id': task-3273995, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.776865] env[69328]: DEBUG nova.network.neutron [req-0fce00ee-a6f3-4359-a626-97234d79a216 req-71fa809c-4f05-4608-b1e2-3f4ccc508394 service nova] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Updated VIF entry in instance network info cache for port 23a25695-a7ad-41dd-b5a1-29ee8d22538e. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1085.777235] env[69328]: DEBUG nova.network.neutron [req-0fce00ee-a6f3-4359-a626-97234d79a216 req-71fa809c-4f05-4608-b1e2-3f4ccc508394 service nova] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Updating instance_info_cache with network_info: [{"id": "23a25695-a7ad-41dd-b5a1-29ee8d22538e", "address": "fa:16:3e:4f:8b:c0", "network": {"id": "ce3bec03-01f9-4ac9-80e4-bfb944c0bb66", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-545997027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87581f423dc64e4fb9fe1d51ebc68597", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23a25695-a7", "ovs_interfaceid": "23a25695-a7ad-41dd-b5a1-29ee8d22538e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1085.807803] env[69328]: INFO nova.network.neutron [None req-51c8a109-ad37-4ba7-add6-55ce64a11cde tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Port 2febedad-c6fa-48cf-893b-6baa5b6ddcd6 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1085.808194] env[69328]: DEBUG nova.network.neutron [None req-51c8a109-ad37-4ba7-add6-55ce64a11cde tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Updating instance_info_cache with network_info: [{"id": "95776220-5fd9-42a1-8bf9-cfb9fe49d62d", "address": "fa:16:3e:0c:f4:26", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.130", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95776220-5f", "ovs_interfaceid": "95776220-5fd9-42a1-8bf9-cfb9fe49d62d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1086.001635] env[69328]: DEBUG oslo_vmware.api [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': task-3273997, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.043137] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-13348fc2-8660-46bd-ac11-bf21353f0390 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1086.043472] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-13348fc2-8660-46bd-ac11-bf21353f0390 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1086.043665] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-13348fc2-8660-46bd-ac11-bf21353f0390 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Deleting the datastore file [datastore1] 9f6f8e97-cb21-4984-af08-a63ea4578eef {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1086.043942] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1b101a1d-9009-4826-8a98-670b0850df1f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.051576] env[69328]: DEBUG nova.scheduler.client.report [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1086.055326] env[69328]: DEBUG oslo_vmware.api [None req-13348fc2-8660-46bd-ac11-bf21353f0390 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 1086.055326] env[69328]: value = "task-3273998" [ 1086.055326] env[69328]: _type = "Task" [ 1086.055326] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.065132] env[69328]: DEBUG oslo_vmware.api [None req-13348fc2-8660-46bd-ac11-bf21353f0390 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273998, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.090925] env[69328]: DEBUG oslo_vmware.api [req-1ceb46ec-a6f0-4f35-83a5-4781ee4ebf58 req-7a44a74f-3c8d-407d-a982-73b0ab75d492 service nova] Task: {'id': task-3273995, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.280307] env[69328]: DEBUG oslo_concurrency.lockutils [req-0fce00ee-a6f3-4359-a626-97234d79a216 req-71fa809c-4f05-4608-b1e2-3f4ccc508394 service nova] Releasing lock "refresh_cache-c1829dcf-3608-4955-bd50-eb9ee27d38e1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1086.283925] env[69328]: DEBUG nova.compute.manager [req-0fce00ee-a6f3-4359-a626-97234d79a216 req-71fa809c-4f05-4608-b1e2-3f4ccc508394 service nova] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Received event network-vif-deleted-2febedad-c6fa-48cf-893b-6baa5b6ddcd6 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1086.284145] env[69328]: INFO nova.compute.manager [req-0fce00ee-a6f3-4359-a626-97234d79a216 req-71fa809c-4f05-4608-b1e2-3f4ccc508394 service nova] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Neutron deleted interface 2febedad-c6fa-48cf-893b-6baa5b6ddcd6; detaching it from the instance and deleting it from the info cache [ 1086.284476] env[69328]: DEBUG nova.network.neutron [req-0fce00ee-a6f3-4359-a626-97234d79a216 req-71fa809c-4f05-4608-b1e2-3f4ccc508394 service nova] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Updating instance_info_cache with network_info: [{"id": "95776220-5fd9-42a1-8bf9-cfb9fe49d62d", "address": "fa:16:3e:0c:f4:26", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.130", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95776220-5f", "ovs_interfaceid": "95776220-5fd9-42a1-8bf9-cfb9fe49d62d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1086.311271] env[69328]: DEBUG oslo_concurrency.lockutils [None req-51c8a109-ad37-4ba7-add6-55ce64a11cde tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Releasing lock "refresh_cache-dc050589-e37a-4798-9532-df4ecfab7eb1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1086.502054] env[69328]: DEBUG oslo_vmware.api [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': task-3273997, 'name': ReconfigVM_Task, 'duration_secs': 0.847546} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.502345] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Reconfigured VM instance instance-00000069 to attach disk [datastore1] 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee/5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1086.503007] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1398353a-370b-402b-93af-bfa0793793ee {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.509884] env[69328]: DEBUG oslo_vmware.api [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Waiting for the task: (returnval){ [ 1086.509884] env[69328]: value = "task-3273999" [ 1086.509884] env[69328]: _type = "Task" [ 1086.509884] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.518078] env[69328]: DEBUG oslo_vmware.api [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': task-3273999, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.557324] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69328) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1086.557549] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.480s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1086.557920] env[69328]: DEBUG oslo_concurrency.lockutils [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 8.323s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1086.573580] env[69328]: DEBUG oslo_vmware.api [None req-13348fc2-8660-46bd-ac11-bf21353f0390 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3273998, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138448} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.575035] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-13348fc2-8660-46bd-ac11-bf21353f0390 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1086.575946] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-13348fc2-8660-46bd-ac11-bf21353f0390 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1086.575946] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-13348fc2-8660-46bd-ac11-bf21353f0390 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1086.575946] env[69328]: INFO nova.compute.manager [None req-13348fc2-8660-46bd-ac11-bf21353f0390 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Took 1.98 seconds to destroy the instance on the hypervisor. [ 1086.575946] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-13348fc2-8660-46bd-ac11-bf21353f0390 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1086.576391] env[69328]: DEBUG nova.compute.manager [-] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1086.576485] env[69328]: DEBUG nova.network.neutron [-] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1086.590930] env[69328]: DEBUG oslo_vmware.api [req-1ceb46ec-a6f0-4f35-83a5-4781ee4ebf58 req-7a44a74f-3c8d-407d-a982-73b0ab75d492 service nova] Task: {'id': task-3273995, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.788183] env[69328]: DEBUG oslo_concurrency.lockutils [req-0fce00ee-a6f3-4359-a626-97234d79a216 req-71fa809c-4f05-4608-b1e2-3f4ccc508394 service nova] Acquiring lock "dc050589-e37a-4798-9532-df4ecfab7eb1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1086.818188] env[69328]: DEBUG oslo_concurrency.lockutils [None req-51c8a109-ad37-4ba7-add6-55ce64a11cde tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "interface-dc050589-e37a-4798-9532-df4ecfab7eb1-2febedad-c6fa-48cf-893b-6baa5b6ddcd6" "released" by "nova.compute.manager.ComputeManager.detach_interface.<locals>.do_detach_interface" :: held 9.695s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1086.845394] env[69328]: DEBUG nova.compute.manager [req-7358e2ba-6e90-43b8-adf0-4135c475f769 req-300a42d8-82b5-4770-bdd5-2ecc3a43485e service nova] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Received event network-vif-deleted-b7d14440-051d-478f-8bda-be652bd1f72f {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1086.845658] env[69328]: INFO nova.compute.manager [req-7358e2ba-6e90-43b8-adf0-4135c475f769 req-300a42d8-82b5-4770-bdd5-2ecc3a43485e service nova] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Neutron deleted interface b7d14440-051d-478f-8bda-be652bd1f72f; detaching it from the instance and deleting it from the info cache [ 1086.845802] env[69328]: DEBUG nova.network.neutron [req-7358e2ba-6e90-43b8-adf0-4135c475f769 req-300a42d8-82b5-4770-bdd5-2ecc3a43485e service nova] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1087.021975] env[69328]: DEBUG oslo_vmware.api [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': task-3273999, 'name': Rename_Task, 'duration_secs': 0.149469} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.022316] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1087.022581] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e308bd39-49c6-48c6-9fbb-2250c1ce5792 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.032183] env[69328]: DEBUG oslo_vmware.api [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Waiting for the task: (returnval){ [ 1087.032183] env[69328]: value = "task-3274000" [ 1087.032183] env[69328]: _type = "Task" [ 1087.032183] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.043247] env[69328]: DEBUG oslo_vmware.api [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': task-3274000, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.095494] env[69328]: DEBUG oslo_vmware.api [req-1ceb46ec-a6f0-4f35-83a5-4781ee4ebf58 req-7a44a74f-3c8d-407d-a982-73b0ab75d492 service nova] Task: {'id': task-3273995, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.322387] env[69328]: DEBUG nova.network.neutron [-] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1087.349041] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2f153d81-6c2a-4694-9279-6e6e0bbaeec1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.364250] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaaa345d-6e01-495a-a3c0-597f9a416de6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.389873] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30947a2e-a1b2-4da8-b7c1-b3fea01a4a03 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.410374] env[69328]: DEBUG nova.compute.manager [req-7358e2ba-6e90-43b8-adf0-4135c475f769 req-300a42d8-82b5-4770-bdd5-2ecc3a43485e service nova] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Detach interface failed, port_id=b7d14440-051d-478f-8bda-be652bd1f72f, reason: Instance 9f6f8e97-cb21-4984-af08-a63ea4578eef could not be found. 
{{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1087.414217] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b633d7ba-1329-437a-8384-6940b67b4634 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.447721] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8d95fef-378c-4ecc-bd41-16567bc1696d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.459097] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bacc1d7-4dd8-476c-8de0-57006568020b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.476595] env[69328]: DEBUG nova.compute.provider_tree [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1087.545482] env[69328]: DEBUG oslo_vmware.api [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': task-3274000, 'name': PowerOnVM_Task, 'duration_secs': 0.499274} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.546094] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1087.546094] env[69328]: INFO nova.compute.manager [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Took 6.37 seconds to spawn the instance on the hypervisor. [ 1087.546286] env[69328]: DEBUG nova.compute.manager [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1087.547043] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f56d030-2b74-43fd-bfc6-1a6b1589d65b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.592314] env[69328]: DEBUG oslo_vmware.api [req-1ceb46ec-a6f0-4f35-83a5-4781ee4ebf58 req-7a44a74f-3c8d-407d-a982-73b0ab75d492 service nova] Task: {'id': task-3273995, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.825868] env[69328]: INFO nova.compute.manager [-] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Took 1.25 seconds to deallocate network for instance. 
[ 1087.980829] env[69328]: DEBUG nova.scheduler.client.report [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1088.068385] env[69328]: INFO nova.compute.manager [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Took 19.87 seconds to build instance. [ 1088.094639] env[69328]: DEBUG oslo_vmware.api [req-1ceb46ec-a6f0-4f35-83a5-4781ee4ebf58 req-7a44a74f-3c8d-407d-a982-73b0ab75d492 service nova] Task: {'id': task-3273995, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.332928] env[69328]: DEBUG oslo_concurrency.lockutils [None req-13348fc2-8660-46bd-ac11-bf21353f0390 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1088.570709] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b59bec63-919c-4501-98e6-c1a6b6e20bf8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Lock "5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 21.396s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1088.594994] env[69328]: DEBUG oslo_vmware.api [req-1ceb46ec-a6f0-4f35-83a5-4781ee4ebf58 req-7a44a74f-3c8d-407d-a982-73b0ab75d492 service nova] Task: {'id': task-3273995, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.992555] env[69328]: DEBUG oslo_concurrency.lockutils [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.434s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1088.995329] env[69328]: DEBUG oslo_concurrency.lockutils [None req-78bb2e64-9e5c-475f-a0c8-9aa88090488b tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.820s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1088.995521] env[69328]: DEBUG oslo_concurrency.lockutils [None req-78bb2e64-9e5c-475f-a0c8-9aa88090488b tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1088.997552] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f9081d18-bbdc-4099-8e52-037a88cc3773 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.156s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1088.997734] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f9081d18-bbdc-4099-8e52-037a88cc3773 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1088.999834] env[69328]: DEBUG oslo_concurrency.lockutils [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.859s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1089.001329] env[69328]: INFO nova.compute.claims [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1089.025259] env[69328]: INFO nova.scheduler.client.report [None req-f9081d18-bbdc-4099-8e52-037a88cc3773 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Deleted allocations for instance a0952fdf-5570-4112-bc4d-e9f9cee1599c [ 1089.027129] env[69328]: INFO nova.scheduler.client.report [None req-78bb2e64-9e5c-475f-a0c8-9aa88090488b tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Deleted allocations for 
instance dd43adb3-b073-483a-81dd-69df7f746874 [ 1089.094026] env[69328]: DEBUG oslo_vmware.api [req-1ceb46ec-a6f0-4f35-83a5-4781ee4ebf58 req-7a44a74f-3c8d-407d-a982-73b0ab75d492 service nova] Task: {'id': task-3273995, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.207065] env[69328]: INFO nova.compute.manager [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Rebuilding instance [ 1089.246141] env[69328]: DEBUG nova.compute.manager [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1089.246987] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2af10dbd-4168-41c2-82f4-7c95c8c38656 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.537578] env[69328]: DEBUG oslo_concurrency.lockutils [None req-78bb2e64-9e5c-475f-a0c8-9aa88090488b tempest-ListServerFiltersTestJSON-171587152 tempest-ListServerFiltersTestJSON-171587152-project-member] Lock "dd43adb3-b073-483a-81dd-69df7f746874" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 12.767s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1089.538351] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f9081d18-bbdc-4099-8e52-037a88cc3773 tempest-ServersNegativeTestJSON-1833125090 tempest-ServersNegativeTestJSON-1833125090-project-member] Lock "a0952fdf-5570-4112-bc4d-e9f9cee1599c" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 12.204s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1089.581036] env[69328]: INFO nova.scheduler.client.report [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Deleted allocation for migration 65843494-d4bc-40c8-866b-e1e3d3443745 [ 1089.598722] env[69328]: DEBUG oslo_vmware.api [req-1ceb46ec-a6f0-4f35-83a5-4781ee4ebf58 req-7a44a74f-3c8d-407d-a982-73b0ab75d492 service nova] Task: {'id': task-3273995, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.097663] env[69328]: DEBUG oslo_concurrency.lockutils [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "204286d7-c806-48cb-85e9-b2a78571777c" "released" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: held 14.931s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1090.106029] env[69328]: DEBUG oslo_vmware.api [req-1ceb46ec-a6f0-4f35-83a5-4781ee4ebf58 req-7a44a74f-3c8d-407d-a982-73b0ab75d492 service nova] Task: {'id': task-3273995, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.261896] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1090.263581] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d91deae4-35d0-4628-a9f7-183c229be14f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.271297] env[69328]: DEBUG oslo_vmware.api [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Waiting for the task: (returnval){ [ 1090.271297] env[69328]: value = "task-3274005" [ 1090.271297] env[69328]: _type = "Task" [ 1090.271297] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.282865] env[69328]: DEBUG oslo_vmware.api [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': task-3274005, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.294311] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d988472-c230-4f90-96d4-40f8be7b7b8b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.302782] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66c1f1f1-a7de-47a5-80f7-061c6fdb2fcb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.338335] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-261e1eb7-02c2-404c-84a6-8ca3273f50a6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.348520] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1a2ce92-9026-4827-8ba8-7167f4c2bb87 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.363223] env[69328]: DEBUG nova.compute.provider_tree [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1090.598304] env[69328]: DEBUG oslo_vmware.api [req-1ceb46ec-a6f0-4f35-83a5-4781ee4ebf58 req-7a44a74f-3c8d-407d-a982-73b0ab75d492 service nova] Task: {'id': task-3273995, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.789395] env[69328]: DEBUG oslo_vmware.api [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': task-3274005, 'name': PowerOffVM_Task, 'duration_secs': 0.120792} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.789395] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1090.789601] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1090.790403] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9db669e7-3746-473e-b0f4-3d6c0069f4ad {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.800071] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1090.800331] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-61e4acab-2c84-43c5-ae30-a8167de97a3e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.836430] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1090.836707] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1090.836910] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Deleting the datastore file [datastore1] 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1090.837199] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-84062753-304f-4d53-9fea-d37b0dc7ad14 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.844791] env[69328]: DEBUG oslo_vmware.api [None 
req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Waiting for the task: (returnval){ [ 1090.844791] env[69328]: value = "task-3274007" [ 1090.844791] env[69328]: _type = "Task" [ 1090.844791] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.854409] env[69328]: DEBUG oslo_vmware.api [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': task-3274007, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.866570] env[69328]: DEBUG nova.scheduler.client.report [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1091.104821] env[69328]: DEBUG oslo_vmware.api [req-1ceb46ec-a6f0-4f35-83a5-4781ee4ebf58 req-7a44a74f-3c8d-407d-a982-73b0ab75d492 service nova] Task: {'id': task-3273995, 'name': ReconfigVM_Task} progress is 18%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.208036] env[69328]: DEBUG oslo_concurrency.lockutils [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "204286d7-c806-48cb-85e9-b2a78571777c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1091.208316] env[69328]: DEBUG oslo_concurrency.lockutils [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "204286d7-c806-48cb-85e9-b2a78571777c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1091.208538] env[69328]: DEBUG oslo_concurrency.lockutils [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "204286d7-c806-48cb-85e9-b2a78571777c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1091.208894] env[69328]: DEBUG oslo_concurrency.lockutils [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "204286d7-c806-48cb-85e9-b2a78571777c-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1091.208894] env[69328]: DEBUG oslo_concurrency.lockutils [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "204286d7-c806-48cb-85e9-b2a78571777c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1091.211474] env[69328]: INFO nova.compute.manager [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Terminating instance [ 1091.357220] env[69328]: DEBUG oslo_vmware.api [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': task-3274007, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.135978} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.357542] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1091.357767] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1091.357981] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1091.372729] env[69328]: DEBUG oslo_concurrency.lockutils [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.373s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1091.373401] env[69328]: DEBUG nova.compute.manager [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1091.376657] env[69328]: DEBUG oslo_concurrency.lockutils [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.738s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1091.378352] env[69328]: INFO nova.compute.claims [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1091.602680] env[69328]: DEBUG oslo_vmware.api [req-1ceb46ec-a6f0-4f35-83a5-4781ee4ebf58 req-7a44a74f-3c8d-407d-a982-73b0ab75d492 service nova] Task: {'id': task-3273995, 'name': ReconfigVM_Task} progress is 18%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.715283] env[69328]: DEBUG nova.compute.manager [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1091.715505] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1091.716454] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-020f6a52-7d70-44de-8a6a-5a28b1e73439 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.724899] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1091.724899] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c04c1ed2-f5b8-4e33-b014-213d41039263 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.734024] env[69328]: DEBUG oslo_vmware.api [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 1091.734024] env[69328]: value = "task-3274009" [ 1091.734024] env[69328]: _type = "Task" [ 1091.734024] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.743042] env[69328]: DEBUG oslo_vmware.api [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3274009, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.885598] env[69328]: DEBUG nova.compute.utils [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1091.894036] env[69328]: DEBUG nova.compute.manager [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1091.894239] env[69328]: DEBUG nova.network.neutron [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1091.973377] env[69328]: DEBUG nova.policy [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b89b3fe8b1204fb79a7045a0de96e001', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '64a88af5392e4ee383413e85730a84d1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 1092.099403] env[69328]: DEBUG oslo_vmware.api [req-1ceb46ec-a6f0-4f35-83a5-4781ee4ebf58 req-7a44a74f-3c8d-407d-a982-73b0ab75d492 service nova] Task: {'id': task-3273995, 'name': ReconfigVM_Task, 'duration_secs': 6.785875} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.099651] env[69328]: DEBUG oslo_concurrency.lockutils [req-1ceb46ec-a6f0-4f35-83a5-4781ee4ebf58 req-7a44a74f-3c8d-407d-a982-73b0ab75d492 service nova] Releasing lock "dc050589-e37a-4798-9532-df4ecfab7eb1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1092.099904] env[69328]: DEBUG nova.virt.vmwareapi.vmops [req-1ceb46ec-a6f0-4f35-83a5-4781ee4ebf58 req-7a44a74f-3c8d-407d-a982-73b0ab75d492 service nova] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Reconfigured VM to detach interface {{(pid=69328) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1092.100434] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0d214c25-ddc2-44fd-b498-d433b0aaa175 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "dc050589-e37a-4798-9532-df4ecfab7eb1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 7.031s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1092.100666] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0d214c25-ddc2-44fd-b498-d433b0aaa175 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "dc050589-e37a-4798-9532-df4ecfab7eb1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1092.100879] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0d214c25-ddc2-44fd-b498-d433b0aaa175 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "dc050589-e37a-4798-9532-df4ecfab7eb1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1092.101065] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0d214c25-ddc2-44fd-b498-d433b0aaa175 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "dc050589-e37a-4798-9532-df4ecfab7eb1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1092.103220] env[69328]: INFO nova.compute.manager [None req-0d214c25-ddc2-44fd-b498-d433b0aaa175 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Terminating instance [ 1092.242973] env[69328]: DEBUG oslo_vmware.api [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3274009, 'name': PowerOffVM_Task, 'duration_secs': 0.294605} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.242973] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1092.243420] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1092.243420] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-209682b1-8667-4331-b607-2fef5a7f624b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.289766] env[69328]: DEBUG nova.network.neutron [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Successfully created port: d0a9a5ba-8927-4de7-892b-8444448e4551 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1092.312397] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1092.312716] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1092.312947] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Deleting the datastore file [datastore2] 204286d7-c806-48cb-85e9-b2a78571777c {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1092.313266] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-06cee033-f526-4045-8a02-dbc5b9f4fe8f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.321187] env[69328]: DEBUG oslo_vmware.api [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for the task: (returnval){ [ 1092.321187] env[69328]: value = "task-3274012" [ 1092.321187] env[69328]: _type = "Task" [ 1092.321187] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.330295] env[69328]: DEBUG oslo_vmware.api [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3274012, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.396304] env[69328]: DEBUG nova.virt.hardware [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1092.396557] env[69328]: DEBUG nova.virt.hardware [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1092.396742] env[69328]: DEBUG nova.virt.hardware [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1092.396931] env[69328]: DEBUG nova.virt.hardware [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1092.397113] env[69328]: DEBUG nova.virt.hardware [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1092.397268] env[69328]: DEBUG nova.virt.hardware [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1092.397506] env[69328]: DEBUG nova.virt.hardware [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 
1092.397684] env[69328]: DEBUG nova.virt.hardware [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1092.397857] env[69328]: DEBUG nova.virt.hardware [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1092.398050] env[69328]: DEBUG nova.virt.hardware [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1092.398247] env[69328]: DEBUG nova.virt.hardware [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1092.398802] env[69328]: DEBUG nova.compute.manager [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1092.405153] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9ff6ff2-aa2b-4607-9f38-3e97dddabadc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.414788] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-521213a4-b6e4-4f8a-a322-a1d825a56914 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.431715] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Instance VIF info [] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1092.437625] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1092.440621] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1092.443986] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8b59d2f0-303d-40a3-afcc-c25ad3075619 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.464211] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1092.464211] env[69328]: value = "task-3274013" [ 1092.464211] env[69328]: _type = "Task" [ 1092.464211] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.475266] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274013, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.607626] env[69328]: DEBUG nova.compute.manager [None req-0d214c25-ddc2-44fd-b498-d433b0aaa175 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1092.607930] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0d214c25-ddc2-44fd-b498-d433b0aaa175 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1092.608998] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ffc0c1c-b5ad-43e3-bf1a-62eb643e5a6d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.619037] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d214c25-ddc2-44fd-b498-d433b0aaa175 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1092.619037] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-425899f3-8b8f-4105-8db6-a9abf2ce4462 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.623750] env[69328]: DEBUG oslo_concurrency.lockutils [None req-043ca1df-0673-4893-aecd-944942bfdd78 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquiring lock "52c87371-4142-40d6-ac68-804aabd9f823" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1092.624028] env[69328]: DEBUG oslo_concurrency.lockutils [None req-043ca1df-0673-4893-aecd-944942bfdd78 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lock "52c87371-4142-40d6-ac68-804aabd9f823" acquired by 
"nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1092.628732] env[69328]: DEBUG oslo_vmware.api [None req-0d214c25-ddc2-44fd-b498-d433b0aaa175 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 1092.628732] env[69328]: value = "task-3274014" [ 1092.628732] env[69328]: _type = "Task" [ 1092.628732] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.640559] env[69328]: DEBUG oslo_vmware.api [None req-0d214c25-ddc2-44fd-b498-d433b0aaa175 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274014, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.789218] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4076aa4-23b9-4fa3-831a-e34950c0a0ee {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.797569] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ad63bd4-563e-4d5c-9a55-1bbc026b4cc6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.839234] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f8420dc-0ee9-4636-b8c3-cf41a028777b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.848656] env[69328]: DEBUG oslo_vmware.api [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Task: {'id': task-3274012, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145963} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.850860] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1092.851039] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1092.851230] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1092.851438] env[69328]: INFO nova.compute.manager [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1092.851695] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1092.851951] env[69328]: DEBUG nova.compute.manager [-] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1092.852057] env[69328]: DEBUG nova.network.neutron [-] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1092.854741] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63f2fcd2-5dfc-4ac3-a93f-7d01de3a7a70 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.871156] env[69328]: DEBUG nova.compute.provider_tree [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1092.975532] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274013, 'name': CreateVM_Task, 'duration_secs': 0.313738} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.975532] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1092.975914] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1092.976027] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1092.976372] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1092.976937] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60e76c94-4b58-43e6-b1c4-83e9c093bfc9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.982108] env[69328]: DEBUG oslo_vmware.api [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Waiting for the task: (returnval){ [ 1092.982108] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5253c031-cb01-ec87-0c3d-67beb97ddac4" [ 1092.982108] env[69328]: _type = "Task" [ 1092.982108] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.990497] env[69328]: DEBUG oslo_vmware.api [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5253c031-cb01-ec87-0c3d-67beb97ddac4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.098055] env[69328]: DEBUG nova.compute.manager [req-b6fe1027-d721-4330-aa03-b1c420992e14 req-b91ca8a6-55ce-4d03-ba07-f3363f55beb1 service nova] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Received event network-vif-deleted-e957681a-e4bc-4b9a-b2b7-a4783ae059b8 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1093.098269] env[69328]: INFO nova.compute.manager [req-b6fe1027-d721-4330-aa03-b1c420992e14 req-b91ca8a6-55ce-4d03-ba07-f3363f55beb1 service nova] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Neutron deleted interface e957681a-e4bc-4b9a-b2b7-a4783ae059b8; detaching it from the instance and deleting it from the info cache [ 1093.098976] env[69328]: DEBUG nova.network.neutron [req-b6fe1027-d721-4330-aa03-b1c420992e14 req-b91ca8a6-55ce-4d03-ba07-f3363f55beb1 service nova] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1093.127340] env[69328]: DEBUG nova.compute.utils [None req-043ca1df-0673-4893-aecd-944942bfdd78 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1093.139571] env[69328]: DEBUG oslo_vmware.api [None req-0d214c25-ddc2-44fd-b498-d433b0aaa175 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274014, 'name': PowerOffVM_Task, 'duration_secs': 0.239194} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.140472] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d214c25-ddc2-44fd-b498-d433b0aaa175 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1093.140832] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0d214c25-ddc2-44fd-b498-d433b0aaa175 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1093.141127] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4a71c959-ddb8-4685-80cd-5c62ca6be378 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.211363] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0d214c25-ddc2-44fd-b498-d433b0aaa175 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1093.211511] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0d214c25-ddc2-44fd-b498-d433b0aaa175 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Deleting contents of the VM from datastore datastore1 {{(pid=69328) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1093.211608] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d214c25-ddc2-44fd-b498-d433b0aaa175 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Deleting the datastore file [datastore1] dc050589-e37a-4798-9532-df4ecfab7eb1 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1093.211870] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1e59f84a-2f2b-433f-92d4-971b901b093b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.218995] env[69328]: DEBUG oslo_vmware.api [None req-0d214c25-ddc2-44fd-b498-d433b0aaa175 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 1093.218995] env[69328]: value = "task-3274016" [ 1093.218995] env[69328]: _type = "Task" [ 1093.218995] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.227599] env[69328]: DEBUG oslo_vmware.api [None req-0d214c25-ddc2-44fd-b498-d433b0aaa175 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274016, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.375031] env[69328]: DEBUG nova.scheduler.client.report [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1093.414791] env[69328]: DEBUG nova.compute.manager [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1093.442902] env[69328]: DEBUG nova.virt.hardware [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1093.443168] env[69328]: DEBUG nova.virt.hardware [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1093.443341] env[69328]: DEBUG nova.virt.hardware [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1093.443536] env[69328]: DEBUG nova.virt.hardware [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1093.443688] env[69328]: DEBUG nova.virt.hardware [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1093.443856] env[69328]: DEBUG nova.virt.hardware [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1093.444111] env[69328]: DEBUG nova.virt.hardware [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1093.444283] env[69328]: DEBUG nova.virt.hardware [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1093.444469] env[69328]: DEBUG nova.virt.hardware [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1093.444634] env[69328]: DEBUG nova.virt.hardware [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1093.444810] env[69328]: DEBUG nova.virt.hardware [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1093.445676] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34af35a9-5118-4d73-bfe5-2b3aee8dfc4b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.453855] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67b05f4d-572b-4596-9af9-811b6aa58a27 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.491594] env[69328]: DEBUG oslo_vmware.api [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5253c031-cb01-ec87-0c3d-67beb97ddac4, 'name': SearchDatastore_Task, 'duration_secs': 0.010965} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.491927] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1093.492181] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1093.492419] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.492567] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1093.492742] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1093.493017] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-480a9a65-a9dc-4f79-bd83-2337b3b3662d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.502601] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1093.502747] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1093.503465] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83fcd978-338e-4289-bd57-926bac03257a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.509096] env[69328]: DEBUG oslo_vmware.api [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Waiting for the task: (returnval){ [ 1093.509096] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ce7714-a8f3-ad7a-db2c-a09f0f598424" [ 1093.509096] env[69328]: _type = "Task" [ 1093.509096] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.516576] env[69328]: DEBUG oslo_vmware.api [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ce7714-a8f3-ad7a-db2c-a09f0f598424, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.584063] env[69328]: DEBUG nova.network.neutron [-] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1093.601549] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bda15e80-796c-4ad0-9242-3a35d93c2b77 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.611070] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82e04baf-93ce-4c9d-b43c-99bc807df2c0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.634654] env[69328]: DEBUG oslo_concurrency.lockutils [None req-043ca1df-0673-4893-aecd-944942bfdd78 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lock "52c87371-4142-40d6-ac68-804aabd9f823" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.011s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1093.647843] env[69328]: DEBUG nova.compute.manager [req-b6fe1027-d721-4330-aa03-b1c420992e14 req-b91ca8a6-55ce-4d03-ba07-f3363f55beb1 service nova] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Detach interface failed, port_id=e957681a-e4bc-4b9a-b2b7-a4783ae059b8, reason: Instance 204286d7-c806-48cb-85e9-b2a78571777c could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1093.731066] env[69328]: DEBUG oslo_vmware.api [None req-0d214c25-ddc2-44fd-b498-d433b0aaa175 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274016, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167333} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.731374] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d214c25-ddc2-44fd-b498-d433b0aaa175 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1093.731807] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0d214c25-ddc2-44fd-b498-d433b0aaa175 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1093.731807] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0d214c25-ddc2-44fd-b498-d433b0aaa175 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1093.731933] env[69328]: INFO nova.compute.manager [None req-0d214c25-ddc2-44fd-b498-d433b0aaa175 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1093.732140] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0d214c25-ddc2-44fd-b498-d433b0aaa175 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1093.732334] env[69328]: DEBUG nova.compute.manager [-] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1093.732428] env[69328]: DEBUG nova.network.neutron [-] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1093.778706] env[69328]: DEBUG nova.compute.manager [req-e0d01477-c14f-4ee3-b1d9-c89b0d3c48b6 req-b166435a-c2c0-4da8-9232-189e1f3d0034 service nova] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Received event network-vif-plugged-d0a9a5ba-8927-4de7-892b-8444448e4551 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1093.778961] env[69328]: DEBUG oslo_concurrency.lockutils [req-e0d01477-c14f-4ee3-b1d9-c89b0d3c48b6 req-b166435a-c2c0-4da8-9232-189e1f3d0034 service nova] Acquiring lock "fb2d04d8-cff6-414c-9d50-3ab61729546d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1093.779251] env[69328]: DEBUG oslo_concurrency.lockutils [req-e0d01477-c14f-4ee3-b1d9-c89b0d3c48b6 req-b166435a-c2c0-4da8-9232-189e1f3d0034 service nova] Lock "fb2d04d8-cff6-414c-9d50-3ab61729546d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1093.779522] env[69328]: DEBUG oslo_concurrency.lockutils [req-e0d01477-c14f-4ee3-b1d9-c89b0d3c48b6 req-b166435a-c2c0-4da8-9232-189e1f3d0034 service nova] Lock "fb2d04d8-cff6-414c-9d50-3ab61729546d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1093.779615] env[69328]: DEBUG nova.compute.manager [req-e0d01477-c14f-4ee3-b1d9-c89b0d3c48b6 req-b166435a-c2c0-4da8-9232-189e1f3d0034 service nova] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] No waiting events found dispatching network-vif-plugged-d0a9a5ba-8927-4de7-892b-8444448e4551 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1093.779785] env[69328]: WARNING nova.compute.manager [req-e0d01477-c14f-4ee3-b1d9-c89b0d3c48b6 req-b166435a-c2c0-4da8-9232-189e1f3d0034 service nova] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Received unexpected event network-vif-plugged-d0a9a5ba-8927-4de7-892b-8444448e4551 for instance with vm_state building and task_state spawning. 
[ 1093.879444] env[69328]: DEBUG oslo_concurrency.lockutils [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.503s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1093.880117] env[69328]: DEBUG nova.compute.manager [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1093.885480] env[69328]: DEBUG nova.network.neutron [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Successfully updated port: d0a9a5ba-8927-4de7-892b-8444448e4551 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1093.885480] env[69328]: DEBUG oslo_concurrency.lockutils [None req-13348fc2-8660-46bd-ac11-bf21353f0390 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.552s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1093.885480] env[69328]: DEBUG nova.objects.instance [None req-13348fc2-8660-46bd-ac11-bf21353f0390 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lazy-loading 'resources' on Instance uuid 9f6f8e97-cb21-4984-af08-a63ea4578eef {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1094.020116] env[69328]: DEBUG oslo_vmware.api [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ce7714-a8f3-ad7a-db2c-a09f0f598424, 'name': SearchDatastore_Task, 'duration_secs': 0.009049} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.020996] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65cc6531-75de-4007-8fd7-2689b222d7ed {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.027587] env[69328]: DEBUG oslo_vmware.api [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Waiting for the task: (returnval){ [ 1094.027587] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a9a4f4-f753-3691-1740-bd66752d33ab" [ 1094.027587] env[69328]: _type = "Task" [ 1094.027587] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.037720] env[69328]: DEBUG oslo_vmware.api [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a9a4f4-f753-3691-1740-bd66752d33ab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.086491] env[69328]: INFO nova.compute.manager [-] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Took 1.23 seconds to deallocate network for instance. [ 1094.389079] env[69328]: DEBUG nova.compute.utils [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1094.390673] env[69328]: DEBUG oslo_concurrency.lockutils [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Acquiring lock "refresh_cache-fb2d04d8-cff6-414c-9d50-3ab61729546d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1094.390818] env[69328]: DEBUG oslo_concurrency.lockutils [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Acquired lock "refresh_cache-fb2d04d8-cff6-414c-9d50-3ab61729546d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1094.391010] env[69328]: DEBUG nova.network.neutron [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1094.396730] env[69328]: DEBUG nova.compute.manager [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1094.397050] env[69328]: DEBUG nova.network.neutron [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1094.456318] env[69328]: DEBUG nova.policy [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a8fbe2a134194d29af48ac8e4986d0cb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd86de4d5055642aa86a29c6768e3db46', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 1094.547821] env[69328]: DEBUG oslo_vmware.api [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a9a4f4-f753-3691-1740-bd66752d33ab, 'name': SearchDatastore_Task, 'duration_secs': 0.012047} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.547821] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1094.547821] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee/5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1094.547821] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b25f2590-b45f-4c58-92fa-32959e8eebdf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.563280] env[69328]: DEBUG oslo_vmware.api [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Waiting for the task: (returnval){ [ 1094.563280] env[69328]: value = "task-3274019" [ 1094.563280] env[69328]: _type = "Task" [ 1094.563280] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.572901] env[69328]: DEBUG oslo_vmware.api [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': task-3274019, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.594794] env[69328]: DEBUG oslo_concurrency.lockutils [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1094.705906] env[69328]: DEBUG oslo_concurrency.lockutils [None req-043ca1df-0673-4893-aecd-944942bfdd78 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquiring lock "52c87371-4142-40d6-ac68-804aabd9f823" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1094.707441] env[69328]: DEBUG oslo_concurrency.lockutils [None req-043ca1df-0673-4893-aecd-944942bfdd78 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lock "52c87371-4142-40d6-ac68-804aabd9f823" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1094.707441] env[69328]: INFO nova.compute.manager [None req-043ca1df-0673-4893-aecd-944942bfdd78 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Attaching volume 3aa45c90-619e-4b44-982d-0c10542c37fe to /dev/sdb [ 1094.717933] env[69328]: DEBUG nova.network.neutron [-] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1094.757573] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-788e28ac-5ea0-4cfc-af12-25351f6017ef {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.760027] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02795ea8-3cc8-4e2b-9998-351760927abf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.771981] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ffd9f20-09fd-4c92-bda6-4ca144cb6406 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.776389] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f50b7e50-31d8-464a-97b8-fc42b2ae31e9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.814195] env[69328]: DEBUG nova.network.neutron [None 
req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Successfully created port: 4c564b09-8166-45b8-b7de-267cd92d78c8 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1094.820910] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d292d56-c878-4348-b656-8d0c63ffdfac {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.824460] env[69328]: DEBUG nova.virt.block_device [None req-043ca1df-0673-4893-aecd-944942bfdd78 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Updating existing volume attachment record: b89dfeb2-008a-4b13-b33b-c29590205581 {{(pid=69328) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1094.833933] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-827f9a84-fa1e-4c17-94e8-d0deb9a3e86a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.851386] env[69328]: DEBUG nova.compute.provider_tree [None req-13348fc2-8660-46bd-ac11-bf21353f0390 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1094.897671] env[69328]: DEBUG nova.compute.manager [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1094.953278] env[69328]: DEBUG nova.network.neutron [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1095.072231] env[69328]: DEBUG oslo_vmware.api [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': task-3274019, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.478048} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.072593] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee/5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1095.072868] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1095.073189] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e03a8d0c-9634-4b67-8761-100b6dcc1984 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.081713] env[69328]: DEBUG oslo_vmware.api [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Waiting for the task: (returnval){ [ 1095.081713] env[69328]: value = "task-3274022" [ 1095.081713] env[69328]: _type = "Task" [ 1095.081713] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.092669] env[69328]: DEBUG oslo_vmware.api [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': task-3274022, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.127490] env[69328]: DEBUG nova.compute.manager [req-d5bbc24d-e270-4212-8ef8-8b2699defcaf req-4dc0eede-46b4-4e80-a625-d49c214c660f service nova] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Received event network-vif-deleted-95776220-5fd9-42a1-8bf9-cfb9fe49d62d {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1095.172521] env[69328]: DEBUG nova.network.neutron [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Updating instance_info_cache with network_info: [{"id": "d0a9a5ba-8927-4de7-892b-8444448e4551", "address": "fa:16:3e:41:00:13", "network": {"id": "363f5dfe-bfe3-453b-bf9f-9a28c32fa90f", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-59500137-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64a88af5392e4ee383413e85730a84d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "975b168a-03e5-449d-95ac-4d51ba027242", "external-id": "nsx-vlan-transportzone-365", "segmentation_id": 365, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0a9a5ba-89", "ovs_interfaceid": "d0a9a5ba-8927-4de7-892b-8444448e4551", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1095.221130] env[69328]: INFO nova.compute.manager [-] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Took 1.49 seconds to deallocate network for instance. [ 1095.357339] env[69328]: DEBUG nova.scheduler.client.report [None req-13348fc2-8660-46bd-ac11-bf21353f0390 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1095.593655] env[69328]: DEBUG oslo_vmware.api [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': task-3274022, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07262} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.594038] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1095.594763] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c2f3269-8b11-4644-85ae-c972b660d355 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.614901] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee/5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1095.615335] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-75b74e43-9824-406a-a761-d38710e5b998 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.637670] env[69328]: DEBUG oslo_vmware.api [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Waiting for the task: (returnval){ [ 1095.637670] env[69328]: value = "task-3274024" [ 1095.637670] env[69328]: _type = "Task" [ 1095.637670] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.647780] env[69328]: DEBUG oslo_vmware.api [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': task-3274024, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.675893] env[69328]: DEBUG oslo_concurrency.lockutils [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Releasing lock "refresh_cache-fb2d04d8-cff6-414c-9d50-3ab61729546d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1095.676337] env[69328]: DEBUG nova.compute.manager [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Instance network_info: |[{"id": "d0a9a5ba-8927-4de7-892b-8444448e4551", "address": "fa:16:3e:41:00:13", "network": {"id": "363f5dfe-bfe3-453b-bf9f-9a28c32fa90f", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-59500137-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64a88af5392e4ee383413e85730a84d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "975b168a-03e5-449d-95ac-4d51ba027242", "external-id": "nsx-vlan-transportzone-365", "segmentation_id": 365, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0a9a5ba-89", "ovs_interfaceid": "d0a9a5ba-8927-4de7-892b-8444448e4551", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1095.676824] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:41:00:13', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '975b168a-03e5-449d-95ac-4d51ba027242', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd0a9a5ba-8927-4de7-892b-8444448e4551', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1095.684582] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Creating folder: Project (64a88af5392e4ee383413e85730a84d1). Parent ref: group-v653649. 
{{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1095.684882] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3807d4f7-d06d-4eba-bf63-f27b249d836f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.697787] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Created folder: Project (64a88af5392e4ee383413e85730a84d1) in parent group-v653649. [ 1095.697974] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Creating folder: Instances. Parent ref: group-v653937. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1095.698279] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b8760c31-6320-4cd0-aa3f-12020f9de517 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.708010] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Created folder: Instances in parent group-v653937. [ 1095.708251] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1095.708465] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1095.708655] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-423b1f20-d29c-46cc-9b06-cd35e40479ac {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.729647] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0d214c25-ddc2-44fd-b498-d433b0aaa175 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1095.729874] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1095.729874] env[69328]: value = "task-3274027" [ 1095.729874] env[69328]: _type = "Task" [ 1095.729874] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.737417] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274027, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.808706] env[69328]: DEBUG nova.compute.manager [req-eff0e44a-0b92-42b1-9116-a6e855a644f7 req-94ccaa2e-a594-46b5-b642-a8dd638a86eb service nova] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Received event network-changed-d0a9a5ba-8927-4de7-892b-8444448e4551 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1095.808706] env[69328]: DEBUG nova.compute.manager [req-eff0e44a-0b92-42b1-9116-a6e855a644f7 req-94ccaa2e-a594-46b5-b642-a8dd638a86eb service nova] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Refreshing instance network info cache due to event network-changed-d0a9a5ba-8927-4de7-892b-8444448e4551. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1095.808706] env[69328]: DEBUG oslo_concurrency.lockutils [req-eff0e44a-0b92-42b1-9116-a6e855a644f7 req-94ccaa2e-a594-46b5-b642-a8dd638a86eb service nova] Acquiring lock "refresh_cache-fb2d04d8-cff6-414c-9d50-3ab61729546d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1095.808706] env[69328]: DEBUG oslo_concurrency.lockutils [req-eff0e44a-0b92-42b1-9116-a6e855a644f7 req-94ccaa2e-a594-46b5-b642-a8dd638a86eb service nova] Acquired lock "refresh_cache-fb2d04d8-cff6-414c-9d50-3ab61729546d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1095.808914] env[69328]: DEBUG nova.network.neutron [req-eff0e44a-0b92-42b1-9116-a6e855a644f7 req-94ccaa2e-a594-46b5-b642-a8dd638a86eb service nova] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Refreshing network info cache for port d0a9a5ba-8927-4de7-892b-8444448e4551 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1095.861351] env[69328]: DEBUG oslo_concurrency.lockutils [None req-13348fc2-8660-46bd-ac11-bf21353f0390 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.977s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1095.863773] env[69328]: DEBUG oslo_concurrency.lockutils [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.269s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1095.863988] env[69328]: DEBUG oslo_concurrency.lockutils [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1095.865790] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0d214c25-ddc2-44fd-b498-d433b0aaa175 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.136s {{(pid=69328) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1095.866039] env[69328]: DEBUG nova.objects.instance [None req-0d214c25-ddc2-44fd-b498-d433b0aaa175 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lazy-loading 'resources' on Instance uuid dc050589-e37a-4798-9532-df4ecfab7eb1 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1095.885588] env[69328]: INFO nova.scheduler.client.report [None req-13348fc2-8660-46bd-ac11-bf21353f0390 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Deleted allocations for instance 9f6f8e97-cb21-4984-af08-a63ea4578eef [ 1095.887218] env[69328]: INFO nova.scheduler.client.report [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Deleted allocations for instance 204286d7-c806-48cb-85e9-b2a78571777c [ 1095.911215] env[69328]: DEBUG nova.compute.manager [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1095.938026] env[69328]: DEBUG nova.virt.hardware [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1095.938026] env[69328]: DEBUG nova.virt.hardware [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1095.938249] env[69328]: DEBUG nova.virt.hardware [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1095.938500] env[69328]: DEBUG nova.virt.hardware [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1095.938678] env[69328]: DEBUG nova.virt.hardware [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 1095.938888] env[69328]: DEBUG nova.virt.hardware [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1095.939186] env[69328]: DEBUG nova.virt.hardware [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1095.939410] env[69328]: DEBUG nova.virt.hardware [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1095.939639] env[69328]: DEBUG nova.virt.hardware [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1095.939847] env[69328]: DEBUG nova.virt.hardware [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1095.940095] env[69328]: DEBUG nova.virt.hardware [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1095.941360] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea2912cb-d7ac-4314-aa8b-6f3e3f445d8b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.950097] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-359980a2-d906-46f3-aeb2-cbc1d57167e8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.151465] env[69328]: DEBUG oslo_vmware.api [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': task-3274024, 'name': ReconfigVM_Task, 'duration_secs': 0.394955} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.151627] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Reconfigured VM instance instance-00000069 to attach disk [datastore1] 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee/5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1096.152722] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-71f8ecb4-b9c0-4af1-8820-908ce3f4e569 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.160775] env[69328]: DEBUG oslo_vmware.api [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Waiting for the task: (returnval){ [ 1096.160775] env[69328]: value = "task-3274029" [ 1096.160775] env[69328]: _type = "Task" [ 1096.160775] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.240393] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274027, 'name': CreateVM_Task, 'duration_secs': 0.472283} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.240393] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1096.241144] env[69328]: DEBUG oslo_concurrency.lockutils [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1096.241317] env[69328]: DEBUG oslo_concurrency.lockutils [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1096.241650] env[69328]: DEBUG oslo_concurrency.lockutils [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1096.241912] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b12327b-2332-4abe-bde9-3a4f4f535bce {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.247806] env[69328]: DEBUG oslo_vmware.api [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 
tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Waiting for the task: (returnval){ [ 1096.247806] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]521624e3-dfa7-cf9b-1a80-49cfdadd7b2d" [ 1096.247806] env[69328]: _type = "Task" [ 1096.247806] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.258629] env[69328]: DEBUG oslo_vmware.api [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]521624e3-dfa7-cf9b-1a80-49cfdadd7b2d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.397826] env[69328]: DEBUG oslo_concurrency.lockutils [None req-495c950a-50b7-49d3-b9c8-8697742e1ae4 tempest-DeleteServersTestJSON-1704685797 tempest-DeleteServersTestJSON-1704685797-project-member] Lock "204286d7-c806-48cb-85e9-b2a78571777c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.189s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1096.401274] env[69328]: DEBUG oslo_concurrency.lockutils [None req-13348fc2-8660-46bd-ac11-bf21353f0390 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "9f6f8e97-cb21-4984-af08-a63ea4578eef" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.316s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1096.455451] env[69328]: DEBUG nova.network.neutron [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Successfully updated port: 4c564b09-8166-45b8-b7de-267cd92d78c8 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1096.552368] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ba35ea8f-9ced-4871-a26f-43a2b70c2e45 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquiring lock "a95d01cf-c26b-466c-a5b6-a7e43f0321fa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1096.552640] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ba35ea8f-9ced-4871-a26f-43a2b70c2e45 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "a95d01cf-c26b-466c-a5b6-a7e43f0321fa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1096.552864] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ba35ea8f-9ced-4871-a26f-43a2b70c2e45 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquiring lock "a95d01cf-c26b-466c-a5b6-a7e43f0321fa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1096.553082] env[69328]: DEBUG 
oslo_concurrency.lockutils [None req-ba35ea8f-9ced-4871-a26f-43a2b70c2e45 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "a95d01cf-c26b-466c-a5b6-a7e43f0321fa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1096.553253] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ba35ea8f-9ced-4871-a26f-43a2b70c2e45 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "a95d01cf-c26b-466c-a5b6-a7e43f0321fa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1096.555444] env[69328]: INFO nova.compute.manager [None req-ba35ea8f-9ced-4871-a26f-43a2b70c2e45 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Terminating instance [ 1096.570597] env[69328]: DEBUG nova.network.neutron [req-eff0e44a-0b92-42b1-9116-a6e855a644f7 req-94ccaa2e-a594-46b5-b642-a8dd638a86eb service nova] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Updated VIF entry in instance network info cache for port d0a9a5ba-8927-4de7-892b-8444448e4551. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1096.570970] env[69328]: DEBUG nova.network.neutron [req-eff0e44a-0b92-42b1-9116-a6e855a644f7 req-94ccaa2e-a594-46b5-b642-a8dd638a86eb service nova] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Updating instance_info_cache with network_info: [{"id": "d0a9a5ba-8927-4de7-892b-8444448e4551", "address": "fa:16:3e:41:00:13", "network": {"id": "363f5dfe-bfe3-453b-bf9f-9a28c32fa90f", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-59500137-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64a88af5392e4ee383413e85730a84d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "975b168a-03e5-449d-95ac-4d51ba027242", "external-id": "nsx-vlan-transportzone-365", "segmentation_id": 365, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0a9a5ba-89", "ovs_interfaceid": "d0a9a5ba-8927-4de7-892b-8444448e4551", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1096.662285] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04a09c46-fcb7-4f6c-91b3-afae0574231b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.676315] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e4de00e-2b90-4cc6-8cf1-4e25f509423c {{(pid=69328) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.679661] env[69328]: DEBUG oslo_vmware.api [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': task-3274029, 'name': Rename_Task, 'duration_secs': 0.156488} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.679956] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1096.680638] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-163395bb-9e0f-41fd-b6ba-feefa12f5ce7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.710388] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f90d126-5218-4d54-9531-bb1055937d44 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.713265] env[69328]: DEBUG oslo_vmware.api [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Waiting for the task: (returnval){ [ 1096.713265] env[69328]: value = "task-3274031" [ 1096.713265] env[69328]: _type = "Task" [ 1096.713265] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.720526] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be88c02d-932b-41a0-8453-949e8724a1cb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.728936] env[69328]: DEBUG oslo_vmware.api [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': task-3274031, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.741086] env[69328]: DEBUG nova.compute.provider_tree [None req-0d214c25-ddc2-44fd-b498-d433b0aaa175 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1096.757947] env[69328]: DEBUG oslo_vmware.api [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]521624e3-dfa7-cf9b-1a80-49cfdadd7b2d, 'name': SearchDatastore_Task, 'duration_secs': 0.009767} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.758273] env[69328]: DEBUG oslo_concurrency.lockutils [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1096.758511] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1096.758750] env[69328]: DEBUG oslo_concurrency.lockutils [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1096.758899] env[69328]: DEBUG oslo_concurrency.lockutils [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1096.759089] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1096.759344] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b00469e3-3249-4a9e-bf14-49fd35441360 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.776534] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1096.776772] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1096.777521] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-758a07dd-926f-4c90-acfa-4ac48dc7f463 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.782642] env[69328]: DEBUG oslo_vmware.api [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Waiting for the task: (returnval){ [ 1096.782642] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]525d2f18-66db-914c-58c9-b0b6c6bd7c4b" [ 1096.782642] env[69328]: _type = "Task" [ 1096.782642] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.790764] env[69328]: DEBUG oslo_vmware.api [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]525d2f18-66db-914c-58c9-b0b6c6bd7c4b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.958272] env[69328]: DEBUG oslo_concurrency.lockutils [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "refresh_cache-aaa9deb3-9a52-43e3-bf9b-a53922439be2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1096.958440] env[69328]: DEBUG oslo_concurrency.lockutils [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquired lock "refresh_cache-aaa9deb3-9a52-43e3-bf9b-a53922439be2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1096.958580] env[69328]: DEBUG nova.network.neutron [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1097.063617] env[69328]: DEBUG nova.compute.manager [None req-ba35ea8f-9ced-4871-a26f-43a2b70c2e45 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1097.063906] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ba35ea8f-9ced-4871-a26f-43a2b70c2e45 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1097.064829] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11ab01cb-7ccc-41df-a4ec-d98f31a8ead2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.073603] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba35ea8f-9ced-4871-a26f-43a2b70c2e45 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1097.073868] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-31fa2560-5a17-45f0-a438-09550c762279 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.075793] env[69328]: DEBUG oslo_concurrency.lockutils [req-eff0e44a-0b92-42b1-9116-a6e855a644f7 req-94ccaa2e-a594-46b5-b642-a8dd638a86eb service nova] Releasing lock "refresh_cache-fb2d04d8-cff6-414c-9d50-3ab61729546d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1097.083210] env[69328]: DEBUG oslo_vmware.api [None req-ba35ea8f-9ced-4871-a26f-43a2b70c2e45 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 1097.083210] env[69328]: value = "task-3274032" [ 1097.083210] env[69328]: _type = "Task" [ 1097.083210] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.091148] env[69328]: DEBUG oslo_vmware.api [None req-ba35ea8f-9ced-4871-a26f-43a2b70c2e45 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3274032, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.162155] env[69328]: DEBUG nova.compute.manager [req-b4099505-6c78-4f35-943d-1e32cb8ce73f req-5cf55dfd-97bf-4af6-8074-0aeffc435e7c service nova] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Received event network-vif-plugged-4c564b09-8166-45b8-b7de-267cd92d78c8 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1097.162155] env[69328]: DEBUG oslo_concurrency.lockutils [req-b4099505-6c78-4f35-943d-1e32cb8ce73f req-5cf55dfd-97bf-4af6-8074-0aeffc435e7c service nova] Acquiring lock "aaa9deb3-9a52-43e3-bf9b-a53922439be2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1097.162155] env[69328]: DEBUG oslo_concurrency.lockutils [req-b4099505-6c78-4f35-943d-1e32cb8ce73f req-5cf55dfd-97bf-4af6-8074-0aeffc435e7c service nova] Lock "aaa9deb3-9a52-43e3-bf9b-a53922439be2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1097.162155] env[69328]: DEBUG oslo_concurrency.lockutils [req-b4099505-6c78-4f35-943d-1e32cb8ce73f req-5cf55dfd-97bf-4af6-8074-0aeffc435e7c service nova] Lock "aaa9deb3-9a52-43e3-bf9b-a53922439be2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1097.162155] env[69328]: DEBUG nova.compute.manager [req-b4099505-6c78-4f35-943d-1e32cb8ce73f req-5cf55dfd-97bf-4af6-8074-0aeffc435e7c service nova] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] No waiting events found dispatching network-vif-plugged-4c564b09-8166-45b8-b7de-267cd92d78c8 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1097.162155] env[69328]: WARNING nova.compute.manager [req-b4099505-6c78-4f35-943d-1e32cb8ce73f req-5cf55dfd-97bf-4af6-8074-0aeffc435e7c service nova] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Received unexpected event network-vif-plugged-4c564b09-8166-45b8-b7de-267cd92d78c8 for instance with vm_state building and task_state spawning. [ 1097.162155] env[69328]: DEBUG nova.compute.manager [req-b4099505-6c78-4f35-943d-1e32cb8ce73f req-5cf55dfd-97bf-4af6-8074-0aeffc435e7c service nova] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Received event network-changed-4c564b09-8166-45b8-b7de-267cd92d78c8 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1097.162155] env[69328]: DEBUG nova.compute.manager [req-b4099505-6c78-4f35-943d-1e32cb8ce73f req-5cf55dfd-97bf-4af6-8074-0aeffc435e7c service nova] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Refreshing instance network info cache due to event network-changed-4c564b09-8166-45b8-b7de-267cd92d78c8. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1097.162155] env[69328]: DEBUG oslo_concurrency.lockutils [req-b4099505-6c78-4f35-943d-1e32cb8ce73f req-5cf55dfd-97bf-4af6-8074-0aeffc435e7c service nova] Acquiring lock "refresh_cache-aaa9deb3-9a52-43e3-bf9b-a53922439be2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1097.224033] env[69328]: DEBUG oslo_vmware.api [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': task-3274031, 'name': PowerOnVM_Task, 'duration_secs': 0.488346} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.224285] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1097.224503] env[69328]: DEBUG nova.compute.manager [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1097.225329] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e68f0ac-0e52-48b4-92e7-69cb863ae8e3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.245256] env[69328]: DEBUG nova.scheduler.client.report [None req-0d214c25-ddc2-44fd-b498-d433b0aaa175 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1097.294879] env[69328]: DEBUG oslo_vmware.api [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]525d2f18-66db-914c-58c9-b0b6c6bd7c4b, 'name': SearchDatastore_Task, 'duration_secs': 0.012009} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.295805] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6eda064-a16c-4802-ac1d-254e6fd15c6d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.302798] env[69328]: DEBUG oslo_vmware.api [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Waiting for the task: (returnval){ [ 1097.302798] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e402e8-08ff-9d98-e075-a6a2d837e425" [ 1097.302798] env[69328]: _type = "Task" [ 1097.302798] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.313233] env[69328]: DEBUG oslo_vmware.api [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e402e8-08ff-9d98-e075-a6a2d837e425, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.492679] env[69328]: DEBUG nova.network.neutron [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1097.594296] env[69328]: DEBUG oslo_vmware.api [None req-ba35ea8f-9ced-4871-a26f-43a2b70c2e45 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3274032, 'name': PowerOffVM_Task, 'duration_secs': 0.288184} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.594500] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba35ea8f-9ced-4871-a26f-43a2b70c2e45 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1097.594670] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ba35ea8f-9ced-4871-a26f-43a2b70c2e45 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1097.594933] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-63851544-b364-4873-af3e-da1a13f55114 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.667704] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ba35ea8f-9ced-4871-a26f-43a2b70c2e45 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1097.668086] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ba35ea8f-9ced-4871-a26f-43a2b70c2e45 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1097.668127] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba35ea8f-9ced-4871-a26f-43a2b70c2e45 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Deleting the datastore file [datastore2] a95d01cf-c26b-466c-a5b6-a7e43f0321fa {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1097.668395] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ff41c249-6f3c-40f7-a445-29e6f3d9d06d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.677676] env[69328]: DEBUG oslo_vmware.api [None req-ba35ea8f-9ced-4871-a26f-43a2b70c2e45 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 1097.677676] env[69328]: value = "task-3274035" [ 1097.677676] env[69328]: _type = "Task" [ 1097.677676] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.689386] env[69328]: DEBUG oslo_vmware.api [None req-ba35ea8f-9ced-4871-a26f-43a2b70c2e45 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3274035, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.700544] env[69328]: DEBUG nova.network.neutron [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Updating instance_info_cache with network_info: [{"id": "4c564b09-8166-45b8-b7de-267cd92d78c8", "address": "fa:16:3e:6d:fe:11", "network": {"id": "7f5dcab4-3cec-42a1-b589-88cb373af645", "bridge": "br-int", "label": "tempest-ServersTestJSON-1833802368-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d86de4d5055642aa86a29c6768e3db46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b94712a6-b777-47dd-bc06-f9acfce2d936", "external-id": "nsx-vlan-transportzone-494", "segmentation_id": 494, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c564b09-81", "ovs_interfaceid": "4c564b09-8166-45b8-b7de-267cd92d78c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1097.743044] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1097.750100] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0d214c25-ddc2-44fd-b498-d433b0aaa175 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.884s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1097.752312] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.009s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1097.752509] env[69328]: DEBUG nova.objects.instance [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69328) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1097.777814] env[69328]: INFO nova.scheduler.client.report [None req-0d214c25-ddc2-44fd-b498-d433b0aaa175 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] 
Deleted allocations for instance dc050589-e37a-4798-9532-df4ecfab7eb1 [ 1097.813487] env[69328]: DEBUG oslo_vmware.api [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e402e8-08ff-9d98-e075-a6a2d837e425, 'name': SearchDatastore_Task, 'duration_secs': 0.012501} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.813753] env[69328]: DEBUG oslo_concurrency.lockutils [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1097.814287] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] fb2d04d8-cff6-414c-9d50-3ab61729546d/fb2d04d8-cff6-414c-9d50-3ab61729546d.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1097.814287] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3a770eef-95eb-49ac-b2a3-07ec631158cf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.822175] env[69328]: DEBUG oslo_vmware.api [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Waiting for the task: (returnval){ [ 1097.822175] env[69328]: value = "task-3274036" [ 1097.822175] env[69328]: _type = "Task" [ 1097.822175] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.832036] env[69328]: DEBUG oslo_vmware.api [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Task: {'id': task-3274036, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.004963] env[69328]: INFO nova.compute.manager [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Rebuilding instance [ 1098.048323] env[69328]: DEBUG nova.compute.manager [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1098.048323] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cba54d30-9e3f-4d58-bc27-d727139933b9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.189876] env[69328]: DEBUG oslo_vmware.api [None req-ba35ea8f-9ced-4871-a26f-43a2b70c2e45 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3274035, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.319808} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.190216] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba35ea8f-9ced-4871-a26f-43a2b70c2e45 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1098.190471] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ba35ea8f-9ced-4871-a26f-43a2b70c2e45 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1098.190706] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ba35ea8f-9ced-4871-a26f-43a2b70c2e45 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1098.190975] env[69328]: INFO nova.compute.manager [None req-ba35ea8f-9ced-4871-a26f-43a2b70c2e45 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1098.191277] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ba35ea8f-9ced-4871-a26f-43a2b70c2e45 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1098.191511] env[69328]: DEBUG nova.compute.manager [-] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1098.191630] env[69328]: DEBUG nova.network.neutron [-] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1098.202859] env[69328]: DEBUG oslo_concurrency.lockutils [None req-496803d9-9d70-49b6-972f-7af2add245d8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Acquiring lock "5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1098.203152] env[69328]: DEBUG oslo_concurrency.lockutils [None req-496803d9-9d70-49b6-972f-7af2add245d8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Lock "5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1098.203374] env[69328]: DEBUG oslo_concurrency.lockutils [None req-496803d9-9d70-49b6-972f-7af2add245d8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Acquiring lock "5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1098.203620] env[69328]: DEBUG oslo_concurrency.lockutils [None req-496803d9-9d70-49b6-972f-7af2add245d8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Lock "5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1098.203814] env[69328]: DEBUG oslo_concurrency.lockutils [None req-496803d9-9d70-49b6-972f-7af2add245d8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Lock "5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1098.205777] env[69328]: DEBUG oslo_concurrency.lockutils [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Releasing lock "refresh_cache-aaa9deb3-9a52-43e3-bf9b-a53922439be2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1098.206100] env[69328]: DEBUG nova.compute.manager [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Instance network_info: |[{"id": "4c564b09-8166-45b8-b7de-267cd92d78c8", "address": 
"fa:16:3e:6d:fe:11", "network": {"id": "7f5dcab4-3cec-42a1-b589-88cb373af645", "bridge": "br-int", "label": "tempest-ServersTestJSON-1833802368-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d86de4d5055642aa86a29c6768e3db46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b94712a6-b777-47dd-bc06-f9acfce2d936", "external-id": "nsx-vlan-transportzone-494", "segmentation_id": 494, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c564b09-81", "ovs_interfaceid": "4c564b09-8166-45b8-b7de-267cd92d78c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1098.206621] env[69328]: INFO nova.compute.manager [None req-496803d9-9d70-49b6-972f-7af2add245d8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Terminating instance [ 1098.208047] env[69328]: DEBUG oslo_concurrency.lockutils [req-b4099505-6c78-4f35-943d-1e32cb8ce73f req-5cf55dfd-97bf-4af6-8074-0aeffc435e7c service nova] Acquired lock "refresh_cache-aaa9deb3-9a52-43e3-bf9b-a53922439be2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1098.208286] env[69328]: DEBUG nova.network.neutron [req-b4099505-6c78-4f35-943d-1e32cb8ce73f req-5cf55dfd-97bf-4af6-8074-0aeffc435e7c service nova] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Refreshing network info cache for port 4c564b09-8166-45b8-b7de-267cd92d78c8 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1098.209331] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6d:fe:11', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b94712a6-b777-47dd-bc06-f9acfce2d936', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4c564b09-8166-45b8-b7de-267cd92d78c8', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1098.217034] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1098.219153] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1098.219973] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-346d0f8e-e692-462f-b942-db69f2ccb411 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.245844] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1098.245844] env[69328]: value = "task-3274039" [ 1098.245844] env[69328]: _type = "Task" [ 1098.245844] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.259119] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274039, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.290095] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0d214c25-ddc2-44fd-b498-d433b0aaa175 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "dc050589-e37a-4798-9532-df4ecfab7eb1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.188s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1098.290167] env[69328]: DEBUG oslo_concurrency.lockutils [req-0fce00ee-a6f3-4359-a626-97234d79a216 req-71fa809c-4f05-4608-b1e2-3f4ccc508394 service nova] Acquired lock "dc050589-e37a-4798-9532-df4ecfab7eb1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1098.291216] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e31f1ae-ca37-49b9-bebf-604ebb4ac8b3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.301384] env[69328]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1098.301384] env[69328]: DEBUG oslo_vmware.api [-] Fault list: [ManagedObjectNotFound] {{(pid=69328) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1098.302330] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-803001c2-3ec1-4e75-861f-0a3448388361 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.312782] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03a28db8-ad2d-40cc-ae69-f9d94e092372 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.342532] env[69328]: DEBUG oslo_vmware.api [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Task: {'id': task-3274036, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.360613] env[69328]: ERROR root [req-0fce00ee-a6f3-4359-a626-97234d79a216 req-71fa809c-4f05-4608-b1e2-3f4ccc508394 service nova] Original exception being dropped: ['Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 377, in request_handler\n response = request(managed_object, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 586, in __call__\n return client.invoke(args, kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 728, in invoke\n result = self.send(soapenv, timeout=timeout)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 777, in send\n return self.process_reply(reply.message, None, None)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 840, in process_reply\n raise WebFault(fault, replyroot)\n', "suds.WebFault: Server raised fault: 'The object 'vim.VirtualMachine:vm-653898' has already been deleted or has not been completely created'\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 301, in _invoke_api\n return api_method(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 479, in get_object_property\n props = get_object_properties(vim, moref, [property_name],\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 360, in get_object_properties\n retrieve_result = vim.RetrievePropertiesEx(\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 413, in request_handler\n raise exceptions.VimFaultException(fault_list, fault_string,\n', "oslo_vmware.exceptions.VimFaultException: The object 'vim.VirtualMachine:vm-653898' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-653898' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-653898'}\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 123, in _call_method\n return self.invoke_api(module, method, self.vim, *args,\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 358, in invoke_api\n return _invoke_api(module, method, *args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 122, in func\n return evt.wait()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait\n result = hub.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch\n return self.greenlet.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 122, in _inner\n idle = self.f(*self.args, **self.kw)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 96, in _func\n result = f(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 341, in _invoke_api\n raise 
clazz(str(excep),\n', "oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-653898' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-653898' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-653898'}\n"]: nova.exception.InstanceNotFound: Instance dc050589-e37a-4798-9532-df4ecfab7eb1 could not be found. [ 1098.360613] env[69328]: DEBUG oslo_concurrency.lockutils [req-0fce00ee-a6f3-4359-a626-97234d79a216 req-71fa809c-4f05-4608-b1e2-3f4ccc508394 service nova] Releasing lock "dc050589-e37a-4798-9532-df4ecfab7eb1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1098.360956] env[69328]: DEBUG nova.compute.manager [req-0fce00ee-a6f3-4359-a626-97234d79a216 req-71fa809c-4f05-4608-b1e2-3f4ccc508394 service nova] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Detach interface failed, port_id=2febedad-c6fa-48cf-893b-6baa5b6ddcd6, reason: Instance dc050589-e37a-4798-9532-df4ecfab7eb1 could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1098.498984] env[69328]: DEBUG nova.compute.manager [req-bcc28b80-0535-437e-bc23-cd25c3a86830 req-583c88b6-7baa-4a85-bf7e-853b6af2703f service nova] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Received event network-vif-deleted-9fa28c73-cefa-44f5-a043-9e6ce86838c0 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1098.499387] env[69328]: INFO nova.compute.manager [req-bcc28b80-0535-437e-bc23-cd25c3a86830 req-583c88b6-7baa-4a85-bf7e-853b6af2703f service nova] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Neutron deleted interface 9fa28c73-cefa-44f5-a043-9e6ce86838c0; detaching it from the instance and deleting it from the info cache [ 1098.499534] env[69328]: DEBUG nova.network.neutron [req-bcc28b80-0535-437e-bc23-cd25c3a86830 req-583c88b6-7baa-4a85-bf7e-853b6af2703f service nova] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1098.720716] env[69328]: DEBUG oslo_concurrency.lockutils [None req-496803d9-9d70-49b6-972f-7af2add245d8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Acquiring lock "refresh_cache-5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.721049] env[69328]: DEBUG oslo_concurrency.lockutils [None req-496803d9-9d70-49b6-972f-7af2add245d8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Acquired lock "refresh_cache-5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1098.721098] env[69328]: DEBUG nova.network.neutron [None req-496803d9-9d70-49b6-972f-7af2add245d8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1098.759400] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274039, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.762198] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9d1df8be-3700-4262-9145-85687fc3828d tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1098.840432] env[69328]: DEBUG oslo_vmware.api [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Task: {'id': task-3274036, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.654702} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.840784] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] fb2d04d8-cff6-414c-9d50-3ab61729546d/fb2d04d8-cff6-414c-9d50-3ab61729546d.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1098.841022] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1098.841297] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-768f020b-1af2-4dbb-bbec-8424c45ac427 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.849059] env[69328]: DEBUG oslo_vmware.api [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Waiting for the task: (returnval){ [ 1098.849059] env[69328]: value = "task-3274040" [ 1098.849059] env[69328]: _type = "Task" [ 1098.849059] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.864792] env[69328]: DEBUG oslo_vmware.api [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Task: {'id': task-3274040, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.921207] env[69328]: DEBUG oslo_concurrency.lockutils [None req-795e502a-82f2-40bd-bb7e-2a2d8726da4c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "071c1837-9d0b-4b69-b16e-991b300385fb" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1098.921207] env[69328]: DEBUG oslo_concurrency.lockutils [None req-795e502a-82f2-40bd-bb7e-2a2d8726da4c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "071c1837-9d0b-4b69-b16e-991b300385fb" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1098.957714] env[69328]: DEBUG nova.network.neutron [-] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.003821] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-330b3f52-db70-4d8d-a826-e917ecfbb3e8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.016543] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-548ff25f-d320-44c3-8fcc-23dbdf1a4b75 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.055497] env[69328]: DEBUG nova.compute.manager [req-bcc28b80-0535-437e-bc23-cd25c3a86830 req-583c88b6-7baa-4a85-bf7e-853b6af2703f service nova] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Detach interface failed, port_id=9fa28c73-cefa-44f5-a043-9e6ce86838c0, reason: Instance a95d01cf-c26b-466c-a5b6-a7e43f0321fa could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1099.060144] env[69328]: DEBUG nova.network.neutron [req-b4099505-6c78-4f35-943d-1e32cb8ce73f req-5cf55dfd-97bf-4af6-8074-0aeffc435e7c service nova] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Updated VIF entry in instance network info cache for port 4c564b09-8166-45b8-b7de-267cd92d78c8. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1099.060765] env[69328]: DEBUG nova.network.neutron [req-b4099505-6c78-4f35-943d-1e32cb8ce73f req-5cf55dfd-97bf-4af6-8074-0aeffc435e7c service nova] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Updating instance_info_cache with network_info: [{"id": "4c564b09-8166-45b8-b7de-267cd92d78c8", "address": "fa:16:3e:6d:fe:11", "network": {"id": "7f5dcab4-3cec-42a1-b589-88cb373af645", "bridge": "br-int", "label": "tempest-ServersTestJSON-1833802368-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d86de4d5055642aa86a29c6768e3db46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b94712a6-b777-47dd-bc06-f9acfce2d936", "external-id": "nsx-vlan-transportzone-494", "segmentation_id": 494, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c564b09-81", "ovs_interfaceid": "4c564b09-8166-45b8-b7de-267cd92d78c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.064046] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1099.064046] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-14c9fcdf-26ef-49b7-b8fb-9af4536c20e1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.074150] env[69328]: DEBUG oslo_vmware.api [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1099.074150] env[69328]: value = "task-3274041" [ 1099.074150] env[69328]: _type = "Task" [ 1099.074150] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.084107] env[69328]: DEBUG oslo_vmware.api [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274041, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.241989] env[69328]: DEBUG nova.network.neutron [None req-496803d9-9d70-49b6-972f-7af2add245d8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1099.260027] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274039, 'name': CreateVM_Task, 'duration_secs': 0.674078} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.263058] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1099.263798] env[69328]: DEBUG oslo_concurrency.lockutils [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1099.263991] env[69328]: DEBUG oslo_concurrency.lockutils [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1099.264321] env[69328]: DEBUG oslo_concurrency.lockutils [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1099.265146] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c05cb181-07db-42fb-b79f-5bdf84e7c572 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.269987] env[69328]: DEBUG oslo_vmware.api [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1099.269987] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5242f98f-3968-7f78-dbcd-64690599f0ee" [ 1099.269987] env[69328]: _type = "Task" [ 1099.269987] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.278714] env[69328]: DEBUG oslo_vmware.api [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5242f98f-3968-7f78-dbcd-64690599f0ee, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.308513] env[69328]: DEBUG nova.network.neutron [None req-496803d9-9d70-49b6-972f-7af2add245d8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.359807] env[69328]: DEBUG oslo_vmware.api [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Task: {'id': task-3274040, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.279156} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.360515] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1099.361330] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bb50008-c841-4daf-ad2b-3bc0f70093e6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.384631] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] fb2d04d8-cff6-414c-9d50-3ab61729546d/fb2d04d8-cff6-414c-9d50-3ab61729546d.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1099.385235] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-043ca1df-0673-4893-aecd-944942bfdd78 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Volume attach. 
Driver type: vmdk {{(pid=69328) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1099.385452] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-043ca1df-0673-4893-aecd-944942bfdd78 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653936', 'volume_id': '3aa45c90-619e-4b44-982d-0c10542c37fe', 'name': 'volume-3aa45c90-619e-4b44-982d-0c10542c37fe', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '52c87371-4142-40d6-ac68-804aabd9f823', 'attached_at': '', 'detached_at': '', 'volume_id': '3aa45c90-619e-4b44-982d-0c10542c37fe', 'serial': '3aa45c90-619e-4b44-982d-0c10542c37fe'} {{(pid=69328) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1099.386035] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-88a392fa-700a-434c-96fb-557868cc6a17 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.401714] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f10b513b-13ef-401d-8ac1-09325c5604d7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.423143] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44737d7c-009f-4e6b-bd00-58790ad92a36 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.425886] env[69328]: DEBUG oslo_vmware.api [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Waiting for the task: (returnval){ [ 1099.425886] env[69328]: value = "task-3274042" [ 1099.425886] env[69328]: _type = "Task" [ 1099.425886] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.426910] env[69328]: DEBUG nova.compute.utils [None req-795e502a-82f2-40bd-bb7e-2a2d8726da4c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1099.451956] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-043ca1df-0673-4893-aecd-944942bfdd78 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] volume-3aa45c90-619e-4b44-982d-0c10542c37fe/volume-3aa45c90-619e-4b44-982d-0c10542c37fe.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1099.453366] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8ab76da7-7d8e-4d9b-b38c-76efec273a7b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.471554] env[69328]: INFO nova.compute.manager [-] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Took 1.28 seconds to deallocate network for instance. [ 1099.471927] env[69328]: DEBUG oslo_vmware.api [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Task: {'id': task-3274042, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.481742] env[69328]: DEBUG oslo_vmware.api [None req-043ca1df-0673-4893-aecd-944942bfdd78 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 1099.481742] env[69328]: value = "task-3274043" [ 1099.481742] env[69328]: _type = "Task" [ 1099.481742] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.491347] env[69328]: DEBUG oslo_vmware.api [None req-043ca1df-0673-4893-aecd-944942bfdd78 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274043, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.563928] env[69328]: DEBUG oslo_concurrency.lockutils [req-b4099505-6c78-4f35-943d-1e32cb8ce73f req-5cf55dfd-97bf-4af6-8074-0aeffc435e7c service nova] Releasing lock "refresh_cache-aaa9deb3-9a52-43e3-bf9b-a53922439be2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1099.583000] env[69328]: DEBUG oslo_vmware.api [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274041, 'name': PowerOffVM_Task, 'duration_secs': 0.216136} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.583298] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1099.583575] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1099.584395] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52d1956d-3203-464f-8f10-797396048b24 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.592240] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1099.592505] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1db8b699-34c1-45d5-8e63-f6fc56182cc2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.665258] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1099.665511] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1099.665760] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Deleting the datastore file [datastore2] dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1099.666105] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bdc5de4e-2f77-4c2d-913c-a171ed1863fa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.674536] env[69328]: DEBUG oslo_vmware.api [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1099.674536] env[69328]: value = "task-3274045" [ 1099.674536] env[69328]: _type = "Task" [ 1099.674536] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.684813] env[69328]: DEBUG oslo_vmware.api [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274045, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.781883] env[69328]: DEBUG oslo_vmware.api [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5242f98f-3968-7f78-dbcd-64690599f0ee, 'name': SearchDatastore_Task, 'duration_secs': 0.018713} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.782229] env[69328]: DEBUG oslo_concurrency.lockutils [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1099.782659] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1099.782751] env[69328]: DEBUG oslo_concurrency.lockutils [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1099.782856] env[69328]: DEBUG oslo_concurrency.lockutils [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1099.783030] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1099.783310] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5862a5d9-2cc7-4214-8df6-2dc0575b50b5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.795112] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1099.795306] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None 
req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1099.796153] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ddc43688-3359-4e7c-9ecd-081033cbcfd3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.803218] env[69328]: DEBUG oslo_vmware.api [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1099.803218] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d119f2-6dea-aff4-a7a3-6ed0b73c740c" [ 1099.803218] env[69328]: _type = "Task" [ 1099.803218] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.815008] env[69328]: DEBUG oslo_concurrency.lockutils [None req-496803d9-9d70-49b6-972f-7af2add245d8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Releasing lock "refresh_cache-5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1099.815538] env[69328]: DEBUG nova.compute.manager [None req-496803d9-9d70-49b6-972f-7af2add245d8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1099.815827] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-496803d9-9d70-49b6-972f-7af2add245d8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1099.816185] env[69328]: DEBUG oslo_vmware.api [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d119f2-6dea-aff4-a7a3-6ed0b73c740c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.817565] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e78f2e65-bf96-4020-8e8e-ae62c3eae1c4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.827649] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-496803d9-9d70-49b6-972f-7af2add245d8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1099.828174] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d0b0f3f9-2d4b-44fc-89d1-f6745ce64f47 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.837045] env[69328]: DEBUG oslo_vmware.api [None req-496803d9-9d70-49b6-972f-7af2add245d8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Waiting for the task: (returnval){ [ 1099.837045] env[69328]: value = "task-3274046" [ 1099.837045] env[69328]: _type = "Task" [ 1099.837045] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.846920] env[69328]: DEBUG oslo_vmware.api [None req-496803d9-9d70-49b6-972f-7af2add245d8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': task-3274046, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.934720] env[69328]: DEBUG oslo_concurrency.lockutils [None req-795e502a-82f2-40bd-bb7e-2a2d8726da4c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "071c1837-9d0b-4b69-b16e-991b300385fb" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.013s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1099.941735] env[69328]: DEBUG oslo_vmware.api [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Task: {'id': task-3274042, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.979017] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ba35ea8f-9ced-4871-a26f-43a2b70c2e45 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1099.979632] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ba35ea8f-9ced-4871-a26f-43a2b70c2e45 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1099.979740] env[69328]: DEBUG nova.objects.instance [None req-ba35ea8f-9ced-4871-a26f-43a2b70c2e45 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lazy-loading 'resources' on Instance uuid a95d01cf-c26b-466c-a5b6-a7e43f0321fa {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1100.003572] env[69328]: DEBUG oslo_vmware.api [None req-043ca1df-0673-4893-aecd-944942bfdd78 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274043, 'name': ReconfigVM_Task, 'duration_secs': 0.49233} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.003572] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-043ca1df-0673-4893-aecd-944942bfdd78 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Reconfigured VM instance instance-00000056 to attach disk [datastore1] volume-3aa45c90-619e-4b44-982d-0c10542c37fe/volume-3aa45c90-619e-4b44-982d-0c10542c37fe.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1100.010594] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1eb2f34e-9be0-48d3-9c06-64310a741546 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.035572] env[69328]: DEBUG oslo_vmware.api [None req-043ca1df-0673-4893-aecd-944942bfdd78 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 1100.035572] env[69328]: value = "task-3274048" [ 1100.035572] env[69328]: _type = "Task" [ 1100.035572] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.046019] env[69328]: DEBUG oslo_vmware.api [None req-043ca1df-0673-4893-aecd-944942bfdd78 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274048, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.189535] env[69328]: DEBUG oslo_vmware.api [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274045, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.239154} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.189779] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1100.190066] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1100.190348] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1100.322338] env[69328]: DEBUG oslo_vmware.api [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d119f2-6dea-aff4-a7a3-6ed0b73c740c, 'name': SearchDatastore_Task, 'duration_secs': 0.013274} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.325432] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a8f016d-752b-4311-a957-45d767a58d67 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.337527] env[69328]: DEBUG oslo_vmware.api [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1100.337527] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52239ed7-7cd6-985c-7de6-6b768aee41c6" [ 1100.337527] env[69328]: _type = "Task" [ 1100.337527] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.354143] env[69328]: DEBUG oslo_vmware.api [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52239ed7-7cd6-985c-7de6-6b768aee41c6, 'name': SearchDatastore_Task, 'duration_secs': 0.010229} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.354143] env[69328]: DEBUG oslo_vmware.api [None req-496803d9-9d70-49b6-972f-7af2add245d8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': task-3274046, 'name': PowerOffVM_Task, 'duration_secs': 0.196106} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.354296] env[69328]: DEBUG oslo_concurrency.lockutils [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1100.354950] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] aaa9deb3-9a52-43e3-bf9b-a53922439be2/aaa9deb3-9a52-43e3-bf9b-a53922439be2.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1100.354950] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-496803d9-9d70-49b6-972f-7af2add245d8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1100.354950] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-496803d9-9d70-49b6-972f-7af2add245d8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1100.355148] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a426f1e9-3eee-4542-9745-82c613f6a034 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.357199] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8477f277-b906-4644-ad49-3a796f59e0cf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.365987] env[69328]: DEBUG oslo_vmware.api [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1100.365987] env[69328]: value = "task-3274050" [ 1100.365987] env[69328]: _type = "Task" [ 1100.365987] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.375169] env[69328]: DEBUG oslo_vmware.api [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3274050, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.386360] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-496803d9-9d70-49b6-972f-7af2add245d8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1100.386583] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-496803d9-9d70-49b6-972f-7af2add245d8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1100.386763] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-496803d9-9d70-49b6-972f-7af2add245d8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Deleting the datastore file [datastore1] 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1100.387073] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d18a1e3f-ead7-4b89-bb3a-04499ad940e7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.402863] env[69328]: DEBUG oslo_concurrency.lockutils [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "de8e6616-0460-4a6e-918c-a27818da96e2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1100.403139] env[69328]: DEBUG oslo_concurrency.lockutils [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "de8e6616-0460-4a6e-918c-a27818da96e2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1100.404748] env[69328]: DEBUG oslo_vmware.api [None req-496803d9-9d70-49b6-972f-7af2add245d8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Waiting for the task: (returnval){ [ 1100.404748] env[69328]: value = "task-3274052" [ 1100.404748] env[69328]: _type = "Task" [ 1100.404748] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.413897] env[69328]: DEBUG oslo_vmware.api [None req-496803d9-9d70-49b6-972f-7af2add245d8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': task-3274052, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.441137] env[69328]: DEBUG oslo_vmware.api [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Task: {'id': task-3274042, 'name': ReconfigVM_Task, 'duration_secs': 0.598994} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.441651] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Reconfigured VM instance instance-0000006a to attach disk [datastore1] fb2d04d8-cff6-414c-9d50-3ab61729546d/fb2d04d8-cff6-414c-9d50-3ab61729546d.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1100.442525] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fc7e7359-d8e1-4448-832a-97920f8bacfb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.451107] env[69328]: DEBUG oslo_vmware.api [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Waiting for the task: (returnval){ [ 1100.451107] env[69328]: value = "task-3274053" [ 1100.451107] env[69328]: _type = "Task" [ 1100.451107] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.463877] env[69328]: DEBUG oslo_vmware.api [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Task: {'id': task-3274053, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.551972] env[69328]: DEBUG oslo_vmware.api [None req-043ca1df-0673-4893-aecd-944942bfdd78 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274048, 'name': ReconfigVM_Task, 'duration_secs': 0.165576} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.556384] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-043ca1df-0673-4893-aecd-944942bfdd78 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653936', 'volume_id': '3aa45c90-619e-4b44-982d-0c10542c37fe', 'name': 'volume-3aa45c90-619e-4b44-982d-0c10542c37fe', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '52c87371-4142-40d6-ac68-804aabd9f823', 'attached_at': '', 'detached_at': '', 'volume_id': '3aa45c90-619e-4b44-982d-0c10542c37fe', 'serial': '3aa45c90-619e-4b44-982d-0c10542c37fe'} {{(pid=69328) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1100.799808] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f53bb39c-eaa0-4110-b0fa-d0153c336d7e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.809007] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3209188a-9845-4868-a0b8-fb6f636eb5a2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.843703] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4592e5a-3523-4ea6-867e-0749da90885d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.852418] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5089939-afa8-4fb6-ab71-f61fbf7cb1d7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.867653] env[69328]: DEBUG nova.compute.provider_tree [None req-ba35ea8f-9ced-4871-a26f-43a2b70c2e45 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1100.879062] env[69328]: DEBUG oslo_vmware.api [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3274050, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.909508] env[69328]: DEBUG nova.compute.manager [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1100.917951] env[69328]: DEBUG oslo_vmware.api [None req-496803d9-9d70-49b6-972f-7af2add245d8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Task: {'id': task-3274052, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.116493} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.918310] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-496803d9-9d70-49b6-972f-7af2add245d8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1100.918528] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-496803d9-9d70-49b6-972f-7af2add245d8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1100.918715] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-496803d9-9d70-49b6-972f-7af2add245d8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1100.918888] env[69328]: INFO nova.compute.manager [None req-496803d9-9d70-49b6-972f-7af2add245d8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1100.919150] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-496803d9-9d70-49b6-972f-7af2add245d8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1100.919349] env[69328]: DEBUG nova.compute.manager [-] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1100.919446] env[69328]: DEBUG nova.network.neutron [-] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1100.936984] env[69328]: DEBUG nova.network.neutron [-] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1100.962420] env[69328]: DEBUG oslo_vmware.api [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Task: {'id': task-3274053, 'name': Rename_Task, 'duration_secs': 0.245292} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.962681] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1100.962912] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ab508f2b-37ba-49cb-9476-d64aae956c9a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.971724] env[69328]: DEBUG oslo_vmware.api [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Waiting for the task: (returnval){ [ 1100.971724] env[69328]: value = "task-3274054" [ 1100.971724] env[69328]: _type = "Task" [ 1100.971724] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.980458] env[69328]: DEBUG oslo_vmware.api [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Task: {'id': task-3274054, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.013716] env[69328]: DEBUG oslo_concurrency.lockutils [None req-795e502a-82f2-40bd-bb7e-2a2d8726da4c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "071c1837-9d0b-4b69-b16e-991b300385fb" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1101.013814] env[69328]: DEBUG oslo_concurrency.lockutils [None req-795e502a-82f2-40bd-bb7e-2a2d8726da4c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "071c1837-9d0b-4b69-b16e-991b300385fb" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1101.014284] env[69328]: INFO nova.compute.manager [None req-795e502a-82f2-40bd-bb7e-2a2d8726da4c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Attaching volume 1afb6c74-c9b8-4214-9cd1-b9f3396261c4 to /dev/sdb [ 1101.063587] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-193588b0-d815-4ea6-9d8f-805cbda7880c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.071730] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27a3def4-14d7-4630-af0a-9aef3dd516f9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.088295] env[69328]: DEBUG nova.virt.block_device [None req-795e502a-82f2-40bd-bb7e-2a2d8726da4c tempest-AttachVolumeNegativeTest-1234324410 
tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Updating existing volume attachment record: 310bfe00-7db5-4de4-9f38-9a7d67f075b1 {{(pid=69328) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1101.234023] env[69328]: DEBUG nova.virt.hardware [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1101.234308] env[69328]: DEBUG nova.virt.hardware [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1101.234602] env[69328]: DEBUG nova.virt.hardware [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1101.234864] env[69328]: DEBUG nova.virt.hardware [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1101.235278] env[69328]: DEBUG nova.virt.hardware [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1101.235487] env[69328]: DEBUG nova.virt.hardware [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1101.236441] env[69328]: DEBUG nova.virt.hardware [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1101.236441] env[69328]: DEBUG nova.virt.hardware [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Build 
topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1101.236441] env[69328]: DEBUG nova.virt.hardware [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1101.236542] env[69328]: DEBUG nova.virt.hardware [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1101.236767] env[69328]: DEBUG nova.virt.hardware [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1101.237725] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41bba275-4655-4bda-8bc7-197cdff06b85 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.247870] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d043dd7e-2631-4870-8efe-69a496fc86a9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.263997] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:47:6b:17', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '357d2811-e990-4985-9f9e-b158d10d3699', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd779425b-180c-47fd-b307-e02e14f18a26', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1101.271778] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1101.272078] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1101.272305] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-19631fdf-3664-4d1f-85e4-e973d1b7cb06 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.293984] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1101.293984] env[69328]: value = "task-3274056" [ 1101.293984] env[69328]: _type = "Task" [ 1101.293984] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.303567] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274056, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.373929] env[69328]: DEBUG nova.scheduler.client.report [None req-ba35ea8f-9ced-4871-a26f-43a2b70c2e45 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1101.381504] env[69328]: DEBUG oslo_vmware.api [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3274050, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.556583} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.382071] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] aaa9deb3-9a52-43e3-bf9b-a53922439be2/aaa9deb3-9a52-43e3-bf9b-a53922439be2.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1101.382316] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1101.382646] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ca30ae26-ef71-42b0-894b-dfb2a2189270 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.392560] env[69328]: DEBUG oslo_vmware.api [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1101.392560] env[69328]: value = "task-3274057" [ 1101.392560] env[69328]: _type = "Task" [ 1101.392560] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.403342] env[69328]: DEBUG oslo_vmware.api [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3274057, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.435265] env[69328]: DEBUG oslo_concurrency.lockutils [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1101.439972] env[69328]: DEBUG nova.network.neutron [-] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1101.485144] env[69328]: DEBUG oslo_vmware.api [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Task: {'id': task-3274054, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.605496] env[69328]: DEBUG nova.objects.instance [None req-043ca1df-0673-4893-aecd-944942bfdd78 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lazy-loading 'flavor' on Instance uuid 52c87371-4142-40d6-ac68-804aabd9f823 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1101.803369] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274056, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.882936] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ba35ea8f-9ced-4871-a26f-43a2b70c2e45 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.903s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1101.885806] env[69328]: DEBUG oslo_concurrency.lockutils [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.451s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1101.887530] env[69328]: INFO nova.compute.claims [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1101.902198] env[69328]: DEBUG oslo_vmware.api [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3274057, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077364} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.902462] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1101.903256] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e836efb-9279-4570-a04f-131c300624e5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.925901] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] aaa9deb3-9a52-43e3-bf9b-a53922439be2/aaa9deb3-9a52-43e3-bf9b-a53922439be2.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1101.926489] env[69328]: INFO nova.scheduler.client.report [None req-ba35ea8f-9ced-4871-a26f-43a2b70c2e45 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Deleted allocations for instance a95d01cf-c26b-466c-a5b6-a7e43f0321fa [ 1101.927450] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6499e2a3-f682-43aa-9d5b-875e0e8cfac0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.944430] env[69328]: INFO nova.compute.manager [-] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Took 1.02 seconds to deallocate network for instance. [ 1101.951353] env[69328]: DEBUG oslo_vmware.api [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1101.951353] env[69328]: value = "task-3274058" [ 1101.951353] env[69328]: _type = "Task" [ 1101.951353] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.961588] env[69328]: DEBUG oslo_vmware.api [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3274058, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.985664] env[69328]: DEBUG oslo_vmware.api [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Task: {'id': task-3274054, 'name': PowerOnVM_Task, 'duration_secs': 0.556816} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.986103] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1101.986410] env[69328]: INFO nova.compute.manager [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Took 8.57 seconds to spawn the instance on the hypervisor. [ 1101.987088] env[69328]: DEBUG nova.compute.manager [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1101.988108] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6907c4a3-31b1-4d03-bc31-76041c75fd44 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.111665] env[69328]: DEBUG oslo_concurrency.lockutils [None req-043ca1df-0673-4893-aecd-944942bfdd78 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lock "52c87371-4142-40d6-ac68-804aabd9f823" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.405s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1102.134238] env[69328]: INFO nova.compute.manager [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Rescuing [ 1102.134591] env[69328]: DEBUG oslo_concurrency.lockutils [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquiring lock "refresh_cache-52c87371-4142-40d6-ac68-804aabd9f823" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1102.134795] env[69328]: DEBUG oslo_concurrency.lockutils [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquired lock "refresh_cache-52c87371-4142-40d6-ac68-804aabd9f823" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1102.134963] env[69328]: DEBUG nova.network.neutron [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1102.305769] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274056, 'name': CreateVM_Task, 'duration_secs': 0.56775} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.305951] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1102.306643] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1102.306813] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1102.307165] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1102.307423] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-767f16fc-cb51-4416-a82a-f9b86843081f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.312368] env[69328]: DEBUG oslo_vmware.api [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1102.312368] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5211d620-20c6-6110-4622-5de843926b99" [ 1102.312368] env[69328]: _type = "Task" [ 1102.312368] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.320169] env[69328]: DEBUG oslo_vmware.api [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5211d620-20c6-6110-4622-5de843926b99, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.448405] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ba35ea8f-9ced-4871-a26f-43a2b70c2e45 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "a95d01cf-c26b-466c-a5b6-a7e43f0321fa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.896s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1102.451413] env[69328]: DEBUG oslo_concurrency.lockutils [None req-496803d9-9d70-49b6-972f-7af2add245d8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1102.463156] env[69328]: DEBUG oslo_vmware.api [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3274058, 'name': ReconfigVM_Task, 'duration_secs': 0.290884} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.464216] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Reconfigured VM instance instance-0000006b to attach disk [datastore1] aaa9deb3-9a52-43e3-bf9b-a53922439be2/aaa9deb3-9a52-43e3-bf9b-a53922439be2.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1102.464902] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8487bd7d-c955-425b-8556-b07591d24450 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.474279] env[69328]: DEBUG oslo_vmware.api [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1102.474279] env[69328]: value = "task-3274060" [ 1102.474279] env[69328]: _type = "Task" [ 1102.474279] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.484601] env[69328]: DEBUG oslo_vmware.api [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3274060, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.509231] env[69328]: INFO nova.compute.manager [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Took 19.38 seconds to build instance. 
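Annotation: the recurring 'Acquiring lock "compute_resources" ... / Lock ... acquired ... :: waited Ns / Lock ... "released" ... :: held Ns' triplets in the entries above and below are emitted by oslo.concurrency's lockutils wrappers around Nova's resource-tracker critical sections. A minimal, self-contained sketch of that pattern follows; the function name and instance UUID are purely illustrative, not Nova's actual code.

    # Sketch of the lockutils pattern behind the "compute_resources" lock log lines.
    # Assumes oslo.concurrency is installed; names below are illustrative only.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid):
        # Runs with the named lock held; lockutils is what logs how long the
        # caller waited to acquire the lock and how long it was held.
        return 'claimed %s' % instance_uuid

    print(claim_resources('de8e6616-0460-4a6e-918c-a27818da96e2'))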
[ 1102.829756] env[69328]: DEBUG oslo_vmware.api [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5211d620-20c6-6110-4622-5de843926b99, 'name': SearchDatastore_Task, 'duration_secs': 0.031439} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.830223] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1102.830580] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1102.830953] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1102.831222] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1102.831485] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1102.831889] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e71b378a-91e3-4035-be26-d9a750536206 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.842690] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1102.842831] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1102.843664] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe77ee18-c00a-4814-b979-2672269f5974 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.849717] env[69328]: DEBUG oslo_vmware.api [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1102.849717] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]520777d6-d35b-dec8-db7e-b3e8709c9b44" [ 1102.849717] env[69328]: _type = "Task" [ 1102.849717] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.859284] env[69328]: DEBUG oslo_vmware.api [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]520777d6-d35b-dec8-db7e-b3e8709c9b44, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.916116] env[69328]: DEBUG nova.network.neutron [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Updating instance_info_cache with network_info: [{"id": "7da3de27-ee87-400f-ae26-a3a6995a8363", "address": "fa:16:3e:91:9b:b5", "network": {"id": "bd41ac10-1850-45a2-8e46-6ebdca3d8e13", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-766393176-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.150", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efd0e2d2f9ba4416bd8fd08dad912465", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7da3de27-ee", "ovs_interfaceid": "7da3de27-ee87-400f-ae26-a3a6995a8363", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1102.985147] env[69328]: DEBUG oslo_vmware.api [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3274060, 'name': Rename_Task, 'duration_secs': 0.144043} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.987703] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1102.988170] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-58381447-152f-4fa7-86e7-2f49390a7dcf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.999043] env[69328]: DEBUG oslo_vmware.api [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1102.999043] env[69328]: value = "task-3274061" [ 1102.999043] env[69328]: _type = "Task" [ 1102.999043] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.006516] env[69328]: DEBUG oslo_vmware.api [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3274061, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.012027] env[69328]: DEBUG oslo_concurrency.lockutils [None req-567c70b4-821a-4dd5-b4ce-49464f73a654 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Lock "fb2d04d8-cff6-414c-9d50-3ab61729546d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.894s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1103.155459] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-072768fc-3f20-4be5-8ceb-838005c6a480 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.166984] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6494c89b-52b0-459d-8b30-e51d03318d25 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.203669] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-031927fd-4440-4d25-b9aa-2888ade1ff05 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.212109] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8befac72-ae62-45df-b7a5-b624c96bbb7f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.227481] env[69328]: DEBUG nova.compute.provider_tree [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1103.367033] env[69328]: DEBUG oslo_vmware.api [None 
req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]520777d6-d35b-dec8-db7e-b3e8709c9b44, 'name': SearchDatastore_Task, 'duration_secs': 0.029239} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.368416] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f06d8128-9fef-4f84-82ed-dcf34da709c7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.377168] env[69328]: DEBUG oslo_vmware.api [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1103.377168] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]522da7cd-3af1-2a56-1359-45f0dce154ad" [ 1103.377168] env[69328]: _type = "Task" [ 1103.377168] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.387744] env[69328]: DEBUG oslo_vmware.api [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]522da7cd-3af1-2a56-1359-45f0dce154ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.421289] env[69328]: DEBUG oslo_concurrency.lockutils [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Releasing lock "refresh_cache-52c87371-4142-40d6-ac68-804aabd9f823" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1103.506511] env[69328]: DEBUG oslo_vmware.api [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3274061, 'name': PowerOnVM_Task, 'duration_secs': 0.477722} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.506804] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1103.507020] env[69328]: INFO nova.compute.manager [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Took 7.60 seconds to spawn the instance on the hypervisor. 
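Annotation: the repeated '_poll_task ... progress is N%' / 'completed successfully' entries come from oslo.vmware polling a vCenter task object at a fixed interval until it reaches a terminal state, and the 'Waiting for function ... to return' entries come from oslo.service's looping-call helper that drives such polling. Below is a small, runnable sketch of that fixed-interval polling pattern under those assumptions; it is illustrative only and not the Nova or oslo.vmware implementation.

    # Illustrative fixed-interval polling loop in the style of oslo.service's
    # loopingcall, the mechanism behind the "progress is N%" poll entries.
    from oslo_service import loopingcall

    progress = {'pct': 0}

    def _poll_task():
        # Stand-in for a task-progress check: advance, log, and stop the loop
        # once the (simulated) task reaches a terminal state.
        progress['pct'] += 50
        print('progress is %d%%' % progress['pct'])
        if progress['pct'] >= 100:
            raise loopingcall.LoopingCallDone(retvalue='success')

    timer = loopingcall.FixedIntervalLoopingCall(_poll_task)
    result = timer.start(interval=0.1).wait()   # blocks until LoopingCallDone
    print('task completed: %s' % result)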
[ 1103.507379] env[69328]: DEBUG nova.compute.manager [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1103.509485] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73d2c69c-a1ca-4ef3-a106-3e092124705c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.641212] env[69328]: DEBUG nova.compute.manager [req-dbc77b47-663c-4496-80ca-9ea003cd9439 req-52258b93-7f3f-4ab1-95a4-b71670ba892b service nova] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Received event network-changed-d0a9a5ba-8927-4de7-892b-8444448e4551 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1103.641212] env[69328]: DEBUG nova.compute.manager [req-dbc77b47-663c-4496-80ca-9ea003cd9439 req-52258b93-7f3f-4ab1-95a4-b71670ba892b service nova] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Refreshing instance network info cache due to event network-changed-d0a9a5ba-8927-4de7-892b-8444448e4551. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1103.641212] env[69328]: DEBUG oslo_concurrency.lockutils [req-dbc77b47-663c-4496-80ca-9ea003cd9439 req-52258b93-7f3f-4ab1-95a4-b71670ba892b service nova] Acquiring lock "refresh_cache-fb2d04d8-cff6-414c-9d50-3ab61729546d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1103.641212] env[69328]: DEBUG oslo_concurrency.lockutils [req-dbc77b47-663c-4496-80ca-9ea003cd9439 req-52258b93-7f3f-4ab1-95a4-b71670ba892b service nova] Acquired lock "refresh_cache-fb2d04d8-cff6-414c-9d50-3ab61729546d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1103.641212] env[69328]: DEBUG nova.network.neutron [req-dbc77b47-663c-4496-80ca-9ea003cd9439 req-52258b93-7f3f-4ab1-95a4-b71670ba892b service nova] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Refreshing network info cache for port d0a9a5ba-8927-4de7-892b-8444448e4551 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1103.655663] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b144be96-a146-47e0-a389-09f44059da41 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquiring lock "65e38a02-880b-46e2-8866-645a9fc17c7a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1103.655663] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b144be96-a146-47e0-a389-09f44059da41 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "65e38a02-880b-46e2-8866-645a9fc17c7a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1103.655663] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b144be96-a146-47e0-a389-09f44059da41 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquiring lock "65e38a02-880b-46e2-8866-645a9fc17c7a-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1103.655663] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b144be96-a146-47e0-a389-09f44059da41 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "65e38a02-880b-46e2-8866-645a9fc17c7a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1103.655663] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b144be96-a146-47e0-a389-09f44059da41 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "65e38a02-880b-46e2-8866-645a9fc17c7a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1103.658052] env[69328]: INFO nova.compute.manager [None req-b144be96-a146-47e0-a389-09f44059da41 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Terminating instance [ 1103.730256] env[69328]: DEBUG nova.scheduler.client.report [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1103.890149] env[69328]: DEBUG oslo_vmware.api [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]522da7cd-3af1-2a56-1359-45f0dce154ad, 'name': SearchDatastore_Task, 'duration_secs': 0.012113} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.890420] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1103.890679] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34/dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1103.890983] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-75693c39-62b3-41f4-8eb4-90895c527e88 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.900364] env[69328]: DEBUG oslo_vmware.api [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1103.900364] env[69328]: value = "task-3274063" [ 1103.900364] env[69328]: _type = "Task" [ 1103.900364] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.909546] env[69328]: DEBUG oslo_vmware.api [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274063, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.031528] env[69328]: INFO nova.compute.manager [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Took 19.42 seconds to build instance. [ 1104.163974] env[69328]: DEBUG nova.compute.manager [None req-b144be96-a146-47e0-a389-09f44059da41 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1104.164243] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b144be96-a146-47e0-a389-09f44059da41 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1104.165217] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70c2c8da-24cd-4a47-a694-acc2dc91897f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.174540] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-b144be96-a146-47e0-a389-09f44059da41 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1104.174862] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2a036768-858b-4a2d-8bc1-9ec30ed26006 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.185167] env[69328]: DEBUG oslo_vmware.api [None req-b144be96-a146-47e0-a389-09f44059da41 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 1104.185167] env[69328]: value = "task-3274064" [ 1104.185167] env[69328]: _type = "Task" [ 1104.185167] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.196373] env[69328]: DEBUG oslo_vmware.api [None req-b144be96-a146-47e0-a389-09f44059da41 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3274064, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.239838] env[69328]: DEBUG oslo_concurrency.lockutils [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.354s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1104.240417] env[69328]: DEBUG nova.compute.manager [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1104.243360] env[69328]: DEBUG oslo_concurrency.lockutils [None req-496803d9-9d70-49b6-972f-7af2add245d8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.792s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1104.244150] env[69328]: DEBUG nova.objects.instance [None req-496803d9-9d70-49b6-972f-7af2add245d8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Lazy-loading 'resources' on Instance uuid 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1104.412865] env[69328]: DEBUG oslo_vmware.api [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274063, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.498547] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Acquiring lock "5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1104.498790] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Lock "5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1104.534018] env[69328]: DEBUG oslo_concurrency.lockutils [None req-179c72a9-28fd-4831-9769-d45053fea067 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "aaa9deb3-9a52-43e3-bf9b-a53922439be2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.940s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1104.542744] env[69328]: DEBUG nova.network.neutron [req-dbc77b47-663c-4496-80ca-9ea003cd9439 req-52258b93-7f3f-4ab1-95a4-b71670ba892b service nova] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Updated VIF entry in instance network info cache for port d0a9a5ba-8927-4de7-892b-8444448e4551. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1104.543182] env[69328]: DEBUG nova.network.neutron [req-dbc77b47-663c-4496-80ca-9ea003cd9439 req-52258b93-7f3f-4ab1-95a4-b71670ba892b service nova] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Updating instance_info_cache with network_info: [{"id": "d0a9a5ba-8927-4de7-892b-8444448e4551", "address": "fa:16:3e:41:00:13", "network": {"id": "363f5dfe-bfe3-453b-bf9f-9a28c32fa90f", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-59500137-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64a88af5392e4ee383413e85730a84d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "975b168a-03e5-449d-95ac-4d51ba027242", "external-id": "nsx-vlan-transportzone-365", "segmentation_id": 365, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0a9a5ba-89", "ovs_interfaceid": "d0a9a5ba-8927-4de7-892b-8444448e4551", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1104.696501] env[69328]: DEBUG oslo_vmware.api [None req-b144be96-a146-47e0-a389-09f44059da41 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3274064, 'name': PowerOffVM_Task, 'duration_secs': 0.343074} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.696795] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-b144be96-a146-47e0-a389-09f44059da41 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1104.696983] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b144be96-a146-47e0-a389-09f44059da41 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1104.697280] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f03d368f-8b36-4405-a373-9b7499963eeb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.748912] env[69328]: DEBUG nova.compute.utils [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1104.754126] env[69328]: DEBUG nova.compute.manager [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1104.754126] env[69328]: DEBUG nova.network.neutron [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1104.775162] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b144be96-a146-47e0-a389-09f44059da41 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1104.776689] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b144be96-a146-47e0-a389-09f44059da41 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1104.776689] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-b144be96-a146-47e0-a389-09f44059da41 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Deleting the datastore file [datastore2] 65e38a02-880b-46e2-8866-645a9fc17c7a {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1104.778717] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-863d76ff-4ae5-4506-85d1-6a663c5816a9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.788279] env[69328]: DEBUG oslo_vmware.api [None 
req-b144be96-a146-47e0-a389-09f44059da41 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for the task: (returnval){ [ 1104.788279] env[69328]: value = "task-3274066" [ 1104.788279] env[69328]: _type = "Task" [ 1104.788279] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.800124] env[69328]: DEBUG oslo_vmware.api [None req-b144be96-a146-47e0-a389-09f44059da41 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3274066, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.835396] env[69328]: DEBUG nova.policy [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '69ca01fd1d0f42b0b05a5426da9753ad', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '30209bc93a4042488f15c73b7e4733d5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 1104.917320] env[69328]: DEBUG oslo_vmware.api [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274063, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.574006} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.919504] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34/dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1104.919831] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1104.920198] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-55046754-00d1-4697-9ae4-b6c9159bd063 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.929267] env[69328]: DEBUG oslo_vmware.api [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1104.929267] env[69328]: value = "task-3274067" [ 1104.929267] env[69328]: _type = "Task" [ 1104.929267] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.940842] env[69328]: DEBUG oslo_vmware.api [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274067, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.974515] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1104.974870] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-412f3a89-a97c-45a5-8435-4c36fb7501d3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.985439] env[69328]: DEBUG oslo_vmware.api [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 1104.985439] env[69328]: value = "task-3274068" [ 1104.985439] env[69328]: _type = "Task" [ 1104.985439] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.997076] env[69328]: DEBUG oslo_vmware.api [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274068, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.002221] env[69328]: DEBUG nova.compute.manager [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Starting instance... 
{{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1105.021741] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0cbb450-f4ac-4306-bdfb-eb1427940d84 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.030040] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce472726-29b4-4f8e-8ecf-3c34ffd02ef9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.063221] env[69328]: DEBUG oslo_concurrency.lockutils [req-dbc77b47-663c-4496-80ca-9ea003cd9439 req-52258b93-7f3f-4ab1-95a4-b71670ba892b service nova] Releasing lock "refresh_cache-fb2d04d8-cff6-414c-9d50-3ab61729546d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1105.064425] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fcdbce4-38c0-4c72-a8d1-01de22e02684 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.074895] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c09ef28-640f-49b5-80eb-0593559bee97 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.089802] env[69328]: DEBUG nova.compute.provider_tree [None req-496803d9-9d70-49b6-972f-7af2add245d8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1105.258740] env[69328]: DEBUG nova.compute.manager [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1105.266429] env[69328]: DEBUG nova.network.neutron [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Successfully created port: 13436ecc-0cb3-4c13-bf18-f81195196ffd {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1105.298324] env[69328]: DEBUG oslo_vmware.api [None req-b144be96-a146-47e0-a389-09f44059da41 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Task: {'id': task-3274066, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.26312} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.299317] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-b144be96-a146-47e0-a389-09f44059da41 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1105.299518] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b144be96-a146-47e0-a389-09f44059da41 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1105.299695] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b144be96-a146-47e0-a389-09f44059da41 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1105.299936] env[69328]: INFO nova.compute.manager [None req-b144be96-a146-47e0-a389-09f44059da41 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1105.300226] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b144be96-a146-47e0-a389-09f44059da41 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1105.300424] env[69328]: DEBUG nova.compute.manager [-] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1105.300522] env[69328]: DEBUG nova.network.neutron [-] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1105.412076] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "ee3609ea-0855-47c2-874c-349c80419781" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1105.412442] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "ee3609ea-0855-47c2-874c-349c80419781" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1105.441755] env[69328]: DEBUG oslo_vmware.api [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274067, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080897} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.441964] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1105.442876] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-105d7a91-1108-477b-9a75-5dce944640cb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.469379] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34/dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1105.469726] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a64778ba-6cc1-48d4-8cbe-1957142934db {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.497365] env[69328]: DEBUG oslo_vmware.api [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274068, 'name': PowerOffVM_Task, 'duration_secs': 0.232446} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.498895] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1105.499332] env[69328]: DEBUG oslo_vmware.api [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1105.499332] env[69328]: value = "task-3274069" [ 1105.499332] env[69328]: _type = "Task" [ 1105.499332] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.500402] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05b92e7e-60f8-4db5-ab00-fbfb31a5cae2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.535105] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03f7d704-8b70-4035-8ad7-0aabbcb94dac {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.538470] env[69328]: DEBUG oslo_vmware.api [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274069, 'name': ReconfigVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.563623] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1105.593692] env[69328]: DEBUG nova.scheduler.client.report [None req-496803d9-9d70-49b6-972f-7af2add245d8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1105.600312] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1105.600653] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-46a288d7-eba2-4cea-b282-527e242bb8e2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.612036] env[69328]: DEBUG oslo_vmware.api [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 1105.612036] env[69328]: value = "task-3274070" [ 1105.612036] env[69328]: _type = "Task" [ 1105.612036] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.616313] env[69328]: DEBUG nova.compute.manager [req-d333d34b-4a65-478b-addd-63444b88f954 req-92c9576f-5797-4e72-a4f4-b458d7003b3a service nova] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Received event network-vif-deleted-dce2dda5-86c0-4f69-a3c1-c0f7a5c3b56e {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1105.616313] env[69328]: INFO nova.compute.manager [req-d333d34b-4a65-478b-addd-63444b88f954 req-92c9576f-5797-4e72-a4f4-b458d7003b3a service nova] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Neutron deleted interface dce2dda5-86c0-4f69-a3c1-c0f7a5c3b56e; detaching it from the instance and deleting it from the info cache [ 1105.616313] env[69328]: DEBUG nova.network.neutron [req-d333d34b-4a65-478b-addd-63444b88f954 req-92c9576f-5797-4e72-a4f4-b458d7003b3a service nova] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1105.629970] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] VM already powered off {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1105.630228] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1105.630425] env[69328]: DEBUG oslo_concurrency.lockutils [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1105.630590] env[69328]: DEBUG oslo_concurrency.lockutils [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1105.630795] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1105.631133] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-81d2d378-c2c0-41ea-a1bd-491b312eea8e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.644338] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1105.648954] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1105.648954] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da5bc2ff-667c-4965-a3e2-0c604921b97b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.652538] env[69328]: DEBUG oslo_vmware.api [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 1105.652538] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52020684-0842-54d9-edad-d99f457b6350" [ 1105.652538] env[69328]: _type = "Task" [ 1105.652538] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.665302] env[69328]: DEBUG oslo_vmware.api [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52020684-0842-54d9-edad-d99f457b6350, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.666307] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-795e502a-82f2-40bd-bb7e-2a2d8726da4c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Volume attach. 
Driver type: vmdk {{(pid=69328) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1105.666511] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-795e502a-82f2-40bd-bb7e-2a2d8726da4c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653941', 'volume_id': '1afb6c74-c9b8-4214-9cd1-b9f3396261c4', 'name': 'volume-1afb6c74-c9b8-4214-9cd1-b9f3396261c4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '071c1837-9d0b-4b69-b16e-991b300385fb', 'attached_at': '', 'detached_at': '', 'volume_id': '1afb6c74-c9b8-4214-9cd1-b9f3396261c4', 'serial': '1afb6c74-c9b8-4214-9cd1-b9f3396261c4'} {{(pid=69328) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1105.667280] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de0e4c48-5e54-4530-932e-1fb20c56bfd5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.686842] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3346324-dabe-4efe-b0e3-568ca8bed9f1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.716015] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-795e502a-82f2-40bd-bb7e-2a2d8726da4c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Reconfiguring VM instance instance-00000065 to attach disk [datastore2] volume-1afb6c74-c9b8-4214-9cd1-b9f3396261c4/volume-1afb6c74-c9b8-4214-9cd1-b9f3396261c4.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1105.716426] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f675838a-adde-42be-9903-d7cd7e75969e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.736879] env[69328]: DEBUG oslo_vmware.api [None req-795e502a-82f2-40bd-bb7e-2a2d8726da4c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 1105.736879] env[69328]: value = "task-3274071" [ 1105.736879] env[69328]: _type = "Task" [ 1105.736879] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.741439] env[69328]: DEBUG oslo_concurrency.lockutils [None req-59dd19c1-4d8c-403e-a49d-e42a386fe4a1 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "aaa9deb3-9a52-43e3-bf9b-a53922439be2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1105.741743] env[69328]: DEBUG oslo_concurrency.lockutils [None req-59dd19c1-4d8c-403e-a49d-e42a386fe4a1 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "aaa9deb3-9a52-43e3-bf9b-a53922439be2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1105.742016] env[69328]: DEBUG oslo_concurrency.lockutils [None req-59dd19c1-4d8c-403e-a49d-e42a386fe4a1 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "aaa9deb3-9a52-43e3-bf9b-a53922439be2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1105.742272] env[69328]: DEBUG oslo_concurrency.lockutils [None req-59dd19c1-4d8c-403e-a49d-e42a386fe4a1 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "aaa9deb3-9a52-43e3-bf9b-a53922439be2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1105.742490] env[69328]: DEBUG oslo_concurrency.lockutils [None req-59dd19c1-4d8c-403e-a49d-e42a386fe4a1 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "aaa9deb3-9a52-43e3-bf9b-a53922439be2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1105.745174] env[69328]: INFO nova.compute.manager [None req-59dd19c1-4d8c-403e-a49d-e42a386fe4a1 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Terminating instance [ 1105.755012] env[69328]: DEBUG oslo_vmware.api [None req-795e502a-82f2-40bd-bb7e-2a2d8726da4c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274071, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.919192] env[69328]: DEBUG nova.compute.manager [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Starting instance... 
{{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1106.015157] env[69328]: DEBUG oslo_vmware.api [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274069, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.098514] env[69328]: DEBUG nova.network.neutron [-] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1106.104162] env[69328]: DEBUG oslo_concurrency.lockutils [None req-496803d9-9d70-49b6-972f-7af2add245d8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.861s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1106.107378] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.544s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1106.108860] env[69328]: INFO nova.compute.claims [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1106.123280] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-051147c4-3523-4931-b38f-d64b9609e46c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.136944] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bd70289-e7e4-4a95-ab37-13e61da4c50d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.151734] env[69328]: INFO nova.scheduler.client.report [None req-496803d9-9d70-49b6-972f-7af2add245d8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Deleted allocations for instance 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee [ 1106.168550] env[69328]: DEBUG oslo_vmware.api [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52020684-0842-54d9-edad-d99f457b6350, 'name': SearchDatastore_Task, 'duration_secs': 0.022396} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.169246] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3d4e8d3-50e7-47fd-bb2e-a104725ce6c6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.184310] env[69328]: DEBUG nova.compute.manager [req-d333d34b-4a65-478b-addd-63444b88f954 req-92c9576f-5797-4e72-a4f4-b458d7003b3a service nova] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Detach interface failed, port_id=dce2dda5-86c0-4f69-a3c1-c0f7a5c3b56e, reason: Instance 65e38a02-880b-46e2-8866-645a9fc17c7a could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1106.189431] env[69328]: DEBUG oslo_vmware.api [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 1106.189431] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52274b23-c6f6-68bd-623a-ff9663207840" [ 1106.189431] env[69328]: _type = "Task" [ 1106.189431] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.198605] env[69328]: DEBUG oslo_vmware.api [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52274b23-c6f6-68bd-623a-ff9663207840, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.247667] env[69328]: DEBUG oslo_vmware.api [None req-795e502a-82f2-40bd-bb7e-2a2d8726da4c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274071, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.250492] env[69328]: DEBUG nova.compute.manager [None req-59dd19c1-4d8c-403e-a49d-e42a386fe4a1 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1106.250635] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-59dd19c1-4d8c-403e-a49d-e42a386fe4a1 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1106.251487] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30e6213d-a383-4556-9ec2-2ca01d9f5e0b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.261033] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-59dd19c1-4d8c-403e-a49d-e42a386fe4a1 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1106.261338] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-52b1752b-528d-4977-bce1-da96b187ec38 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.270014] env[69328]: DEBUG oslo_vmware.api [None req-59dd19c1-4d8c-403e-a49d-e42a386fe4a1 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1106.270014] env[69328]: value = "task-3274072" [ 1106.270014] env[69328]: _type = "Task" [ 1106.270014] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.275462] env[69328]: DEBUG nova.compute.manager [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1106.284868] env[69328]: DEBUG oslo_vmware.api [None req-59dd19c1-4d8c-403e-a49d-e42a386fe4a1 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3274072, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.303407] env[69328]: DEBUG nova.virt.hardware [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1106.303689] env[69328]: DEBUG nova.virt.hardware [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1106.303901] env[69328]: DEBUG nova.virt.hardware [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1106.304170] env[69328]: DEBUG nova.virt.hardware [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1106.304430] env[69328]: DEBUG nova.virt.hardware [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1106.304622] env[69328]: DEBUG nova.virt.hardware [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1106.304883] env[69328]: DEBUG nova.virt.hardware [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1106.305073] env[69328]: DEBUG nova.virt.hardware [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
1106.305249] env[69328]: DEBUG nova.virt.hardware [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1106.305413] env[69328]: DEBUG nova.virt.hardware [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1106.305590] env[69328]: DEBUG nova.virt.hardware [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1106.306497] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50331aa7-1b2f-4406-ba5f-8a624af9c53c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.315467] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dcb4e47-90df-476d-b69f-c67b38ba6908 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.445883] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1106.493447] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f91340a8-6da6-4e74-bb88-0a6655022c4b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquiring lock "76210566-12d7-4f6a-afa1-6329e87e0f85" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1106.494385] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f91340a8-6da6-4e74-bb88-0a6655022c4b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lock "76210566-12d7-4f6a-afa1-6329e87e0f85" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1106.515709] env[69328]: DEBUG oslo_vmware.api [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274069, 'name': ReconfigVM_Task, 'duration_secs': 0.66545} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.515981] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Reconfigured VM instance instance-00000066 to attach disk [datastore2] dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34/dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1106.516655] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b9c04c26-fa60-422c-a590-8c1f0ab377e1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.523023] env[69328]: DEBUG oslo_vmware.api [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1106.523023] env[69328]: value = "task-3274073" [ 1106.523023] env[69328]: _type = "Task" [ 1106.523023] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.534066] env[69328]: DEBUG oslo_vmware.api [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274073, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.601128] env[69328]: INFO nova.compute.manager [-] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Took 1.30 seconds to deallocate network for instance. [ 1106.660295] env[69328]: DEBUG oslo_concurrency.lockutils [None req-496803d9-9d70-49b6-972f-7af2add245d8 tempest-ServersListShow298Test-97043612 tempest-ServersListShow298Test-97043612-project-member] Lock "5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.457s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1106.702627] env[69328]: DEBUG oslo_vmware.api [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52274b23-c6f6-68bd-623a-ff9663207840, 'name': SearchDatastore_Task, 'duration_secs': 0.012765} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.702996] env[69328]: DEBUG oslo_concurrency.lockutils [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1106.703342] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 52c87371-4142-40d6-ac68-804aabd9f823/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318-rescue.vmdk. {{(pid=69328) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1106.703648] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f8f6c42b-1b5a-4bb3-bf68-6c812b68e712 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.713410] env[69328]: DEBUG oslo_vmware.api [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 1106.713410] env[69328]: value = "task-3274074" [ 1106.713410] env[69328]: _type = "Task" [ 1106.713410] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.722808] env[69328]: DEBUG oslo_vmware.api [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274074, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.748092] env[69328]: DEBUG oslo_vmware.api [None req-795e502a-82f2-40bd-bb7e-2a2d8726da4c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274071, 'name': ReconfigVM_Task, 'duration_secs': 0.764884} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.748411] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-795e502a-82f2-40bd-bb7e-2a2d8726da4c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Reconfigured VM instance instance-00000065 to attach disk [datastore2] volume-1afb6c74-c9b8-4214-9cd1-b9f3396261c4/volume-1afb6c74-c9b8-4214-9cd1-b9f3396261c4.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1106.753392] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7175af4a-710b-4bd5-99ce-8faf230d10f3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.785724] env[69328]: DEBUG oslo_vmware.api [None req-59dd19c1-4d8c-403e-a49d-e42a386fe4a1 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3274072, 'name': PowerOffVM_Task, 'duration_secs': 0.283043} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.787794] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-59dd19c1-4d8c-403e-a49d-e42a386fe4a1 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1106.787999] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-59dd19c1-4d8c-403e-a49d-e42a386fe4a1 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1106.788482] env[69328]: DEBUG oslo_vmware.api [None req-795e502a-82f2-40bd-bb7e-2a2d8726da4c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 1106.788482] env[69328]: value = "task-3274075" [ 1106.788482] env[69328]: _type = "Task" [ 1106.788482] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.788769] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9a8e8285-92be-4f74-a215-05dd6896cff1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.806887] env[69328]: DEBUG oslo_vmware.api [None req-795e502a-82f2-40bd-bb7e-2a2d8726da4c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274075, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.868147] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-59dd19c1-4d8c-403e-a49d-e42a386fe4a1 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1106.868545] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-59dd19c1-4d8c-403e-a49d-e42a386fe4a1 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1106.868838] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-59dd19c1-4d8c-403e-a49d-e42a386fe4a1 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Deleting the datastore file [datastore1] aaa9deb3-9a52-43e3-bf9b-a53922439be2 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1106.869311] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c2f3fc8b-aa32-4710-b62c-1920b6541782 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.874977] env[69328]: DEBUG nova.compute.manager [req-7d5de17b-67d9-4c41-bfb6-7f354a0c46ec req-3bee31b8-319d-4aa9-9cad-92066ef13623 service nova] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Received event network-vif-plugged-13436ecc-0cb3-4c13-bf18-f81195196ffd {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1106.875322] env[69328]: DEBUG oslo_concurrency.lockutils [req-7d5de17b-67d9-4c41-bfb6-7f354a0c46ec req-3bee31b8-319d-4aa9-9cad-92066ef13623 service nova] Acquiring lock "de8e6616-0460-4a6e-918c-a27818da96e2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1106.875650] env[69328]: DEBUG oslo_concurrency.lockutils [req-7d5de17b-67d9-4c41-bfb6-7f354a0c46ec req-3bee31b8-319d-4aa9-9cad-92066ef13623 service nova] Lock "de8e6616-0460-4a6e-918c-a27818da96e2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1106.875928] env[69328]: DEBUG oslo_concurrency.lockutils [req-7d5de17b-67d9-4c41-bfb6-7f354a0c46ec req-3bee31b8-319d-4aa9-9cad-92066ef13623 service nova] Lock "de8e6616-0460-4a6e-918c-a27818da96e2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1106.876212] env[69328]: DEBUG nova.compute.manager [req-7d5de17b-67d9-4c41-bfb6-7f354a0c46ec req-3bee31b8-319d-4aa9-9cad-92066ef13623 service nova] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] No waiting events found dispatching network-vif-plugged-13436ecc-0cb3-4c13-bf18-f81195196ffd {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1106.876619] env[69328]: WARNING nova.compute.manager [req-7d5de17b-67d9-4c41-bfb6-7f354a0c46ec req-3bee31b8-319d-4aa9-9cad-92066ef13623 service nova] [instance: 
de8e6616-0460-4a6e-918c-a27818da96e2] Received unexpected event network-vif-plugged-13436ecc-0cb3-4c13-bf18-f81195196ffd for instance with vm_state building and task_state spawning. [ 1106.883283] env[69328]: DEBUG oslo_vmware.api [None req-59dd19c1-4d8c-403e-a49d-e42a386fe4a1 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1106.883283] env[69328]: value = "task-3274077" [ 1106.883283] env[69328]: _type = "Task" [ 1106.883283] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.896321] env[69328]: DEBUG oslo_vmware.api [None req-59dd19c1-4d8c-403e-a49d-e42a386fe4a1 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3274077, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.997657] env[69328]: INFO nova.compute.manager [None req-f91340a8-6da6-4e74-bb88-0a6655022c4b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Detaching volume 68ea45a2-2443-494f-afc8-d4648ea33fa0 [ 1107.015570] env[69328]: DEBUG nova.network.neutron [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Successfully updated port: 13436ecc-0cb3-4c13-bf18-f81195196ffd {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1107.035677] env[69328]: DEBUG oslo_vmware.api [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274073, 'name': Rename_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.036562] env[69328]: INFO nova.virt.block_device [None req-f91340a8-6da6-4e74-bb88-0a6655022c4b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Attempting to driver detach volume 68ea45a2-2443-494f-afc8-d4648ea33fa0 from mountpoint /dev/sdb [ 1107.036855] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-f91340a8-6da6-4e74-bb88-0a6655022c4b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Volume detach. 
Driver type: vmdk {{(pid=69328) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1107.037101] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-f91340a8-6da6-4e74-bb88-0a6655022c4b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653905', 'volume_id': '68ea45a2-2443-494f-afc8-d4648ea33fa0', 'name': 'volume-68ea45a2-2443-494f-afc8-d4648ea33fa0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '76210566-12d7-4f6a-afa1-6329e87e0f85', 'attached_at': '', 'detached_at': '', 'volume_id': '68ea45a2-2443-494f-afc8-d4648ea33fa0', 'serial': '68ea45a2-2443-494f-afc8-d4648ea33fa0'} {{(pid=69328) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1107.037900] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aa49a95-c1c6-4ebf-a308-0fd17c56072d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.060707] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dabf76fb-ff26-4c32-bc5b-ca0d08cf43a9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.069323] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05a58b10-9df8-4894-8d4c-1f1f76ea02c9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.093416] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42112291-a6f5-409d-974c-713cd497dbdf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.110624] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b144be96-a146-47e0-a389-09f44059da41 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1107.110944] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-f91340a8-6da6-4e74-bb88-0a6655022c4b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] The volume has not been displaced from its original location: [datastore2] volume-68ea45a2-2443-494f-afc8-d4648ea33fa0/volume-68ea45a2-2443-494f-afc8-d4648ea33fa0.vmdk. No consolidation needed. 
{{(pid=69328) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1107.116419] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-f91340a8-6da6-4e74-bb88-0a6655022c4b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Reconfiguring VM instance instance-0000004c to detach disk 2001 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1107.119398] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ecc5471-74f7-421e-879d-748a4c97768f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.142958] env[69328]: DEBUG oslo_vmware.api [None req-f91340a8-6da6-4e74-bb88-0a6655022c4b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 1107.142958] env[69328]: value = "task-3274078" [ 1107.142958] env[69328]: _type = "Task" [ 1107.142958] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.154240] env[69328]: DEBUG oslo_vmware.api [None req-f91340a8-6da6-4e74-bb88-0a6655022c4b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274078, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.224816] env[69328]: DEBUG oslo_vmware.api [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274074, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.301646] env[69328]: DEBUG oslo_vmware.api [None req-795e502a-82f2-40bd-bb7e-2a2d8726da4c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274075, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.394564] env[69328]: DEBUG oslo_vmware.api [None req-59dd19c1-4d8c-403e-a49d-e42a386fe4a1 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3274077, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.486011} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.394825] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-59dd19c1-4d8c-403e-a49d-e42a386fe4a1 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1107.395027] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-59dd19c1-4d8c-403e-a49d-e42a386fe4a1 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1107.395217] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-59dd19c1-4d8c-403e-a49d-e42a386fe4a1 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1107.395396] env[69328]: INFO nova.compute.manager [None req-59dd19c1-4d8c-403e-a49d-e42a386fe4a1 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1107.395645] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-59dd19c1-4d8c-403e-a49d-e42a386fe4a1 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1107.395871] env[69328]: DEBUG nova.compute.manager [-] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1107.395931] env[69328]: DEBUG nova.network.neutron [-] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1107.399253] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b35ec22-14b6-45e4-b078-1e448d366479 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.408122] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d330c00e-bd93-4362-9a57-07bd6d781be6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.444089] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70d9edb0-1db1-4fb9-9353-81685db89ef4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.453302] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aae91c22-d018-4a7a-96f9-f50c0e40a12a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.471783] env[69328]: DEBUG nova.compute.provider_tree [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 
tempest-ServerActionsV293TestJSON-277494671-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1107.522413] env[69328]: DEBUG oslo_concurrency.lockutils [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "refresh_cache-de8e6616-0460-4a6e-918c-a27818da96e2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1107.522697] env[69328]: DEBUG oslo_concurrency.lockutils [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquired lock "refresh_cache-de8e6616-0460-4a6e-918c-a27818da96e2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1107.522943] env[69328]: DEBUG nova.network.neutron [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1107.539353] env[69328]: DEBUG oslo_vmware.api [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274073, 'name': Rename_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.658865] env[69328]: DEBUG oslo_vmware.api [None req-f91340a8-6da6-4e74-bb88-0a6655022c4b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274078, 'name': ReconfigVM_Task, 'duration_secs': 0.384996} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.659155] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-f91340a8-6da6-4e74-bb88-0a6655022c4b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Reconfigured VM instance instance-0000004c to detach disk 2001 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1107.664116] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cd67d2f0-5f4a-4564-a3d9-0b0c77629479 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.684371] env[69328]: DEBUG oslo_vmware.api [None req-f91340a8-6da6-4e74-bb88-0a6655022c4b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 1107.684371] env[69328]: value = "task-3274079" [ 1107.684371] env[69328]: _type = "Task" [ 1107.684371] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.693909] env[69328]: DEBUG oslo_vmware.api [None req-f91340a8-6da6-4e74-bb88-0a6655022c4b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274079, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.727540] env[69328]: DEBUG oslo_vmware.api [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274074, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.804278] env[69328]: DEBUG oslo_vmware.api [None req-795e502a-82f2-40bd-bb7e-2a2d8726da4c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274075, 'name': ReconfigVM_Task, 'duration_secs': 0.876595} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.805067] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-795e502a-82f2-40bd-bb7e-2a2d8726da4c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653941', 'volume_id': '1afb6c74-c9b8-4214-9cd1-b9f3396261c4', 'name': 'volume-1afb6c74-c9b8-4214-9cd1-b9f3396261c4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '071c1837-9d0b-4b69-b16e-991b300385fb', 'attached_at': '', 'detached_at': '', 'volume_id': '1afb6c74-c9b8-4214-9cd1-b9f3396261c4', 'serial': '1afb6c74-c9b8-4214-9cd1-b9f3396261c4'} {{(pid=69328) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1107.976278] env[69328]: DEBUG nova.scheduler.client.report [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1108.039586] env[69328]: DEBUG oslo_vmware.api [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274073, 'name': Rename_Task, 'duration_secs': 1.094524} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.040542] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1108.040947] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ae9b7381-9298-4390-a11a-d6a5e6166401 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.050068] env[69328]: DEBUG oslo_vmware.api [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1108.050068] env[69328]: value = "task-3274080" [ 1108.050068] env[69328]: _type = "Task" [ 1108.050068] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.059430] env[69328]: DEBUG oslo_vmware.api [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274080, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.085719] env[69328]: DEBUG nova.network.neutron [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1108.195495] env[69328]: DEBUG oslo_vmware.api [None req-f91340a8-6da6-4e74-bb88-0a6655022c4b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274079, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.227091] env[69328]: DEBUG oslo_vmware.api [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274074, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.230208} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.227091] env[69328]: INFO nova.virt.vmwareapi.ds_util [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 52c87371-4142-40d6-ac68-804aabd9f823/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318-rescue.vmdk. 
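The "Waiting for the task" / "Task: {...} progress is N%" records above and below are the polling output of oslo.vmware's session layer while a vCenter task (here a CopyVirtualDisk_Task) runs. A minimal sketch of that pattern, assuming placeholder vCenter host, credentials, and datastore paths that are NOT taken from this log:

    from oslo_vmware import api as vmware_api

    # Minimal sketch, placeholder credentials only: open a vCenter session and
    # start a server-side disk copy, then block on it. wait_for_task() is what
    # emits the "Waiting for the task" / "progress is N%" DEBUG lines seen here.
    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',            # placeholders
        api_retry_count=10, task_poll_interval=0.5)

    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName='[datastore1] devstack-image-cache_base/<image>.vmdk',
        destName='[datastore1] <instance>/<image>-rescue.vmdk')
    session.wait_for_task(task)   # polls until the task reaches success/error
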
[ 1108.227091] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c31523b5-9fae-4c75-9e48-9b6f47fde80d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.249650] env[69328]: DEBUG nova.network.neutron [-] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1108.258127] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] 52c87371-4142-40d6-ac68-804aabd9f823/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318-rescue.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1108.259088] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-401741d6-1454-4e8d-bdaf-9a599099599c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.278937] env[69328]: DEBUG oslo_vmware.api [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 1108.278937] env[69328]: value = "task-3274081" [ 1108.278937] env[69328]: _type = "Task" [ 1108.278937] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.288673] env[69328]: DEBUG oslo_vmware.api [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274081, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.481554] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.375s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1108.481732] env[69328]: DEBUG nova.compute.manager [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1108.484593] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.039s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1108.489023] env[69328]: INFO nova.compute.claims [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1108.526587] env[69328]: DEBUG nova.network.neutron [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Updating instance_info_cache with network_info: [{"id": "13436ecc-0cb3-4c13-bf18-f81195196ffd", "address": "fa:16:3e:2e:1b:14", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13436ecc-0c", "ovs_interfaceid": "13436ecc-0cb3-4c13-bf18-f81195196ffd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1108.561623] env[69328]: DEBUG oslo_vmware.api [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274080, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.695689] env[69328]: DEBUG oslo_vmware.api [None req-f91340a8-6da6-4e74-bb88-0a6655022c4b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274079, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.759419] env[69328]: INFO nova.compute.manager [-] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Took 1.36 seconds to deallocate network for instance. 
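The Lock "compute_resources" acquired/"released" records just above, with their "waited Ns" / "held Ns" timings, are emitted by oslo.concurrency's lockutils around a serialized critical section. A minimal sketch of that mechanism, with an illustrative function name and lock prefix rather than Nova's actual resource-tracker code:

    from oslo_concurrency import lockutils

    # Minimal sketch: lockutils logs 'Lock "<name>" acquired by "<func>" ::
    # waited Ns' on entry and '"released" ... :: held Ns' on exit, at DEBUG.
    @lockutils.synchronized('compute_resources', 'example-')  # prefix is illustrative
    def claim_resources(instance_uuid):
        # Only one thread at a time runs this block for the given lock name.
        print('claiming resources for %s' % instance_uuid)

    claim_resources('00000000-0000-0000-0000-000000000000')
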
[ 1108.788991] env[69328]: DEBUG oslo_vmware.api [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274081, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.857598] env[69328]: DEBUG nova.objects.instance [None req-795e502a-82f2-40bd-bb7e-2a2d8726da4c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lazy-loading 'flavor' on Instance uuid 071c1837-9d0b-4b69-b16e-991b300385fb {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1108.905275] env[69328]: DEBUG nova.compute.manager [req-ff36a0e6-b929-4605-99ec-98457965060d req-333874f3-09d0-4b15-8bde-5f3379dd9f1f service nova] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Received event network-changed-13436ecc-0cb3-4c13-bf18-f81195196ffd {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1108.905477] env[69328]: DEBUG nova.compute.manager [req-ff36a0e6-b929-4605-99ec-98457965060d req-333874f3-09d0-4b15-8bde-5f3379dd9f1f service nova] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Refreshing instance network info cache due to event network-changed-13436ecc-0cb3-4c13-bf18-f81195196ffd. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1108.905669] env[69328]: DEBUG oslo_concurrency.lockutils [req-ff36a0e6-b929-4605-99ec-98457965060d req-333874f3-09d0-4b15-8bde-5f3379dd9f1f service nova] Acquiring lock "refresh_cache-de8e6616-0460-4a6e-918c-a27818da96e2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.990235] env[69328]: DEBUG nova.compute.utils [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1108.993907] env[69328]: DEBUG nova.compute.manager [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1108.994171] env[69328]: DEBUG nova.network.neutron [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1109.029306] env[69328]: DEBUG oslo_concurrency.lockutils [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Releasing lock "refresh_cache-de8e6616-0460-4a6e-918c-a27818da96e2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1109.029360] env[69328]: DEBUG nova.compute.manager [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Instance network_info: |[{"id": "13436ecc-0cb3-4c13-bf18-f81195196ffd", "address": "fa:16:3e:2e:1b:14", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13436ecc-0c", "ovs_interfaceid": "13436ecc-0cb3-4c13-bf18-f81195196ffd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1109.030465] env[69328]: DEBUG oslo_concurrency.lockutils [req-ff36a0e6-b929-4605-99ec-98457965060d req-333874f3-09d0-4b15-8bde-5f3379dd9f1f service nova] Acquired lock "refresh_cache-de8e6616-0460-4a6e-918c-a27818da96e2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1109.030465] env[69328]: DEBUG nova.network.neutron [req-ff36a0e6-b929-4605-99ec-98457965060d req-333874f3-09d0-4b15-8bde-5f3379dd9f1f service nova] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Refreshing network info cache for port 13436ecc-0cb3-4c13-bf18-f81195196ffd {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1109.031045] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2e:1b:14', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'de7fa486-5f28-44ae-b0cf-72234ff87546', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': '13436ecc-0cb3-4c13-bf18-f81195196ffd', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1109.038874] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1109.040063] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1109.040471] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4c0cf855-f544-4c34-84aa-59993f6a1df7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.056287] env[69328]: DEBUG nova.policy [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7602b3714b4f48deaa37fa08ca49a01c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e32664bc571e4ebdabcb1b4956a677fc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 1109.069371] env[69328]: DEBUG oslo_vmware.api [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274080, 'name': PowerOnVM_Task} progress is 78%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.070910] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1109.070910] env[69328]: value = "task-3274082" [ 1109.070910] env[69328]: _type = "Task" [ 1109.070910] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.080585] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274082, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.197152] env[69328]: DEBUG oslo_vmware.api [None req-f91340a8-6da6-4e74-bb88-0a6655022c4b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274079, 'name': ReconfigVM_Task, 'duration_secs': 1.202026} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.197505] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-f91340a8-6da6-4e74-bb88-0a6655022c4b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653905', 'volume_id': '68ea45a2-2443-494f-afc8-d4648ea33fa0', 'name': 'volume-68ea45a2-2443-494f-afc8-d4648ea33fa0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '76210566-12d7-4f6a-afa1-6329e87e0f85', 'attached_at': '', 'detached_at': '', 'volume_id': '68ea45a2-2443-494f-afc8-d4648ea33fa0', 'serial': '68ea45a2-2443-494f-afc8-d4648ea33fa0'} {{(pid=69328) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1109.266167] env[69328]: DEBUG oslo_concurrency.lockutils [None req-59dd19c1-4d8c-403e-a49d-e42a386fe4a1 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1109.291257] env[69328]: DEBUG oslo_vmware.api [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274081, 'name': ReconfigVM_Task, 'duration_secs': 0.965628} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.291550] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Reconfigured VM instance instance-00000056 to attach disk [datastore1] 52c87371-4142-40d6-ac68-804aabd9f823/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318-rescue.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1109.292470] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1a08156-edc9-42a7-bf0d-7813a825de78 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.322484] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7f1403d6-d5cc-41ea-836c-9bb978f4e639 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.339063] env[69328]: DEBUG oslo_vmware.api [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 1109.339063] env[69328]: value = "task-3274083" [ 1109.339063] env[69328]: _type = "Task" [ 1109.339063] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.348460] env[69328]: DEBUG oslo_vmware.api [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274083, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.363539] env[69328]: DEBUG oslo_concurrency.lockutils [None req-795e502a-82f2-40bd-bb7e-2a2d8726da4c tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "071c1837-9d0b-4b69-b16e-991b300385fb" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.350s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1109.431849] env[69328]: DEBUG nova.network.neutron [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Successfully created port: f52daaa5-48f6-4553-ac25-4a0103a7736f {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1109.496267] env[69328]: DEBUG nova.compute.manager [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1109.567913] env[69328]: DEBUG oslo_vmware.api [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274080, 'name': PowerOnVM_Task, 'duration_secs': 1.304376} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.568213] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1109.568418] env[69328]: DEBUG nova.compute.manager [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1109.569228] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a58b39eb-0b48-444e-b795-26985385dbc8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.587278] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274082, 'name': CreateVM_Task, 'duration_secs': 0.392503} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.590009] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1109.590856] env[69328]: DEBUG oslo_concurrency.lockutils [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1109.591051] env[69328]: DEBUG oslo_concurrency.lockutils [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1109.591366] env[69328]: DEBUG oslo_concurrency.lockutils [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1109.591649] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8961e79b-e1ae-4c4d-b988-2bc5de02c9eb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.596886] env[69328]: DEBUG oslo_vmware.api [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 1109.596886] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52239b7d-f747-1f34-528e-520db00e9019" [ 1109.596886] env[69328]: _type = "Task" [ 1109.596886] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.605867] env[69328]: DEBUG oslo_vmware.api [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52239b7d-f747-1f34-528e-520db00e9019, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.678159] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a643a3ae-873a-4480-9dda-75e2bfaccc22 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "071c1837-9d0b-4b69-b16e-991b300385fb" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1109.678159] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a643a3ae-873a-4480-9dda-75e2bfaccc22 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "071c1837-9d0b-4b69-b16e-991b300385fb" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1109.750396] env[69328]: DEBUG nova.objects.instance [None req-f91340a8-6da6-4e74-bb88-0a6655022c4b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lazy-loading 'flavor' on Instance uuid 76210566-12d7-4f6a-afa1-6329e87e0f85 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1109.768922] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36bb45d1-6c0d-4c6f-a6bc-886d65679f88 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.777405] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ab2ed5f-22c9-445f-b27c-39e572cea943 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.812655] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-400a9b2d-c642-4e4e-95df-e91bf51b930c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.820665] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e189a9f-097d-4905-a4fc-e7df40eed2d6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.834147] env[69328]: DEBUG nova.compute.provider_tree [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1109.844952] env[69328]: DEBUG nova.network.neutron [req-ff36a0e6-b929-4605-99ec-98457965060d req-333874f3-09d0-4b15-8bde-5f3379dd9f1f service nova] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Updated VIF entry in instance network info cache for port 13436ecc-0cb3-4c13-bf18-f81195196ffd. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1109.845370] env[69328]: DEBUG nova.network.neutron [req-ff36a0e6-b929-4605-99ec-98457965060d req-333874f3-09d0-4b15-8bde-5f3379dd9f1f service nova] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Updating instance_info_cache with network_info: [{"id": "13436ecc-0cb3-4c13-bf18-f81195196ffd", "address": "fa:16:3e:2e:1b:14", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13436ecc-0c", "ovs_interfaceid": "13436ecc-0cb3-4c13-bf18-f81195196ffd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1109.850325] env[69328]: DEBUG oslo_vmware.api [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274083, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.004703] env[69328]: INFO nova.virt.block_device [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Booting with volume 10778dba-1e87-4365-bb28-98360345c613 at /dev/sda [ 1110.039049] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-21b84b73-a6d4-46d1-be85-34f896c33e6d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.049730] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1a48666-15ca-48ed-b34f-8d34221119f2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.086366] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1146ac0f-0762-4749-88c1-a35e37f90d82 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.099114] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e95cc5b-c4cc-4174-9abc-30b42052d801 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.109483] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1110.119157] env[69328]: DEBUG oslo_vmware.api [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52239b7d-f747-1f34-528e-520db00e9019, 'name': SearchDatastore_Task, 'duration_secs': 0.0149} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.119157] env[69328]: DEBUG oslo_concurrency.lockutils [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1110.119157] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1110.119297] env[69328]: DEBUG oslo_concurrency.lockutils [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1110.119507] env[69328]: DEBUG oslo_concurrency.lockutils [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1110.119704] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1110.119946] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-009ba8da-7bf8-407a-8b99-56866f77e1b2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.138302] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5c1a54d-f8a4-4357-8475-5f5cdf401345 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.141010] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1110.141714] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1110.141901] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b58244a-38c5-4af5-8ac0-23fbaacc0085 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.150047] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc4bf655-a816-479e-9ee7-3a7c77aa286e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.153603] env[69328]: DEBUG oslo_vmware.api [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 1110.153603] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52001a13-1488-40e7-9aac-2de48155afd7" [ 1110.153603] env[69328]: _type = "Task" [ 1110.153603] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.161491] env[69328]: DEBUG oslo_vmware.api [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52001a13-1488-40e7-9aac-2de48155afd7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.165635] env[69328]: DEBUG nova.virt.block_device [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Updating existing volume attachment record: 1d3177e9-94bc-46ec-8e72-ac3f9fb71e28 {{(pid=69328) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1110.184577] env[69328]: INFO nova.compute.manager [None req-a643a3ae-873a-4480-9dda-75e2bfaccc22 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Detaching volume 1afb6c74-c9b8-4214-9cd1-b9f3396261c4 [ 1110.217395] env[69328]: INFO nova.virt.block_device [None req-a643a3ae-873a-4480-9dda-75e2bfaccc22 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Attempting to driver detach volume 1afb6c74-c9b8-4214-9cd1-b9f3396261c4 from mountpoint /dev/sdb [ 1110.218060] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-a643a3ae-873a-4480-9dda-75e2bfaccc22 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Volume detach. 
Driver type: vmdk {{(pid=69328) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1110.218060] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-a643a3ae-873a-4480-9dda-75e2bfaccc22 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653941', 'volume_id': '1afb6c74-c9b8-4214-9cd1-b9f3396261c4', 'name': 'volume-1afb6c74-c9b8-4214-9cd1-b9f3396261c4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '071c1837-9d0b-4b69-b16e-991b300385fb', 'attached_at': '', 'detached_at': '', 'volume_id': '1afb6c74-c9b8-4214-9cd1-b9f3396261c4', 'serial': '1afb6c74-c9b8-4214-9cd1-b9f3396261c4'} {{(pid=69328) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1110.218745] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaa5b7c5-01e5-4048-b683-5a05881371a2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.240331] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c1ad558-e736-4dc4-be81-a4b46493823d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.248318] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d64fed9-2607-4fd9-9a36-6693523d8c17 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.272670] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99af4cf9-8233-4db2-9ee6-e8c8a2ccb0d0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.290377] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-a643a3ae-873a-4480-9dda-75e2bfaccc22 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] The volume has not been displaced from its original location: [datastore2] volume-1afb6c74-c9b8-4214-9cd1-b9f3396261c4/volume-1afb6c74-c9b8-4214-9cd1-b9f3396261c4.vmdk. No consolidation needed. 
{{(pid=69328) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1110.295756] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-a643a3ae-873a-4480-9dda-75e2bfaccc22 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Reconfiguring VM instance instance-00000065 to detach disk 2001 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1110.296629] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d518053e-8868-4244-a3e7-750f455c6ac7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.317467] env[69328]: DEBUG oslo_vmware.api [None req-a643a3ae-873a-4480-9dda-75e2bfaccc22 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 1110.317467] env[69328]: value = "task-3274084" [ 1110.317467] env[69328]: _type = "Task" [ 1110.317467] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.326281] env[69328]: DEBUG oslo_vmware.api [None req-a643a3ae-873a-4480-9dda-75e2bfaccc22 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274084, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.337326] env[69328]: DEBUG nova.scheduler.client.report [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1110.354153] env[69328]: DEBUG oslo_concurrency.lockutils [req-ff36a0e6-b929-4605-99ec-98457965060d req-333874f3-09d0-4b15-8bde-5f3379dd9f1f service nova] Releasing lock "refresh_cache-de8e6616-0460-4a6e-918c-a27818da96e2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1110.354384] env[69328]: DEBUG nova.compute.manager [req-ff36a0e6-b929-4605-99ec-98457965060d req-333874f3-09d0-4b15-8bde-5f3379dd9f1f service nova] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Received event network-vif-deleted-4c564b09-8166-45b8-b7de-267cd92d78c8 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1110.355029] env[69328]: DEBUG oslo_vmware.api [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274083, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.666808] env[69328]: DEBUG oslo_vmware.api [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52001a13-1488-40e7-9aac-2de48155afd7, 'name': SearchDatastore_Task, 'duration_secs': 0.018389} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.667789] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dab2a85f-e2c3-40fa-8c71-57f9bc2ff71d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.674151] env[69328]: DEBUG oslo_vmware.api [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 1110.674151] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e526e4-3803-ee7a-70e4-51dd63808d8e" [ 1110.674151] env[69328]: _type = "Task" [ 1110.674151] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.682414] env[69328]: DEBUG oslo_vmware.api [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e526e4-3803-ee7a-70e4-51dd63808d8e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.778052] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f91340a8-6da6-4e74-bb88-0a6655022c4b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lock "76210566-12d7-4f6a-afa1-6329e87e0f85" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.283s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1110.827663] env[69328]: DEBUG oslo_vmware.api [None req-a643a3ae-873a-4480-9dda-75e2bfaccc22 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274084, 'name': ReconfigVM_Task, 'duration_secs': 0.391458} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.827955] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-a643a3ae-873a-4480-9dda-75e2bfaccc22 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Reconfigured VM instance instance-00000065 to detach disk 2001 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1110.834167] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-35d84aca-0baa-49c5-b791-20247bd361b6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.848034] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.363s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1110.848412] env[69328]: DEBUG nova.compute.manager [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1110.851046] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b144be96-a146-47e0-a389-09f44059da41 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.741s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1110.851274] env[69328]: DEBUG nova.objects.instance [None req-b144be96-a146-47e0-a389-09f44059da41 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lazy-loading 'resources' on Instance uuid 65e38a02-880b-46e2-8866-645a9fc17c7a {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1110.853768] env[69328]: DEBUG oslo_vmware.api [None req-a643a3ae-873a-4480-9dda-75e2bfaccc22 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 1110.853768] env[69328]: value = "task-3274085" [ 1110.853768] env[69328]: _type = "Task" [ 1110.853768] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.860560] env[69328]: DEBUG oslo_vmware.api [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274083, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.867879] env[69328]: DEBUG oslo_vmware.api [None req-a643a3ae-873a-4480-9dda-75e2bfaccc22 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274085, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.935137] env[69328]: DEBUG nova.compute.manager [req-5d19b8ef-60d0-47f5-b7a4-fbb35bd4187c req-8bd464f5-cfd5-43e6-8ac2-ba12dee02e3d service nova] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Received event network-vif-plugged-f52daaa5-48f6-4553-ac25-4a0103a7736f {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1110.935373] env[69328]: DEBUG oslo_concurrency.lockutils [req-5d19b8ef-60d0-47f5-b7a4-fbb35bd4187c req-8bd464f5-cfd5-43e6-8ac2-ba12dee02e3d service nova] Acquiring lock "5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1110.935587] env[69328]: DEBUG oslo_concurrency.lockutils [req-5d19b8ef-60d0-47f5-b7a4-fbb35bd4187c req-8bd464f5-cfd5-43e6-8ac2-ba12dee02e3d service nova] Lock "5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1110.935757] env[69328]: DEBUG oslo_concurrency.lockutils [req-5d19b8ef-60d0-47f5-b7a4-fbb35bd4187c req-8bd464f5-cfd5-43e6-8ac2-ba12dee02e3d service nova] Lock "5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1110.935925] env[69328]: DEBUG nova.compute.manager [req-5d19b8ef-60d0-47f5-b7a4-fbb35bd4187c req-8bd464f5-cfd5-43e6-8ac2-ba12dee02e3d service nova] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] No waiting events found dispatching network-vif-plugged-f52daaa5-48f6-4553-ac25-4a0103a7736f {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1110.936149] env[69328]: WARNING nova.compute.manager [req-5d19b8ef-60d0-47f5-b7a4-fbb35bd4187c req-8bd464f5-cfd5-43e6-8ac2-ba12dee02e3d service nova] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Received unexpected event network-vif-plugged-f52daaa5-48f6-4553-ac25-4a0103a7736f for instance with vm_state building and task_state block_device_mapping. [ 1110.940498] env[69328]: DEBUG nova.network.neutron [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Successfully updated port: f52daaa5-48f6-4553-ac25-4a0103a7736f {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1111.186207] env[69328]: DEBUG oslo_vmware.api [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e526e4-3803-ee7a-70e4-51dd63808d8e, 'name': SearchDatastore_Task, 'duration_secs': 0.020659} completed successfully. 
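
The network-vif-plugged-f52daaa5... entries above show Neutron's external event arriving before the spawning thread has registered a waiter for it: the instance is still in task_state block_device_mapping, so pop_instance_event finds "No waiting events" and the event is logged as an unexpected WARNING rather than treated as an error; the next entry confirms the port itself was updated successfully. A toy sketch of that prepare/pop pattern using threading.Event, much simpler than the real InstanceEvents machinery in nova/compute/manager.py:

    import threading

    class InstanceEvents:
        def __init__(self):
            self._lock = threading.Lock()
            self._events = {}

        def prepare(self, name):
            # The spawning thread registers interest before it starts waiting.
            with self._lock:
                ev = threading.Event()
                self._events[name] = ev
            return ev

        def pop(self, name):
            # Called when the external event arrives over the API.
            with self._lock:
                ev = self._events.pop(name, None)
            if ev is None:
                print('Received unexpected event %s' % name)
            else:
                ev.set()

    events = InstanceEvents()
    events.pop('network-vif-plugged-f52daaa5')   # nobody waiting yet -> "unexpected"
    waiter = events.prepare('network-vif-plugged-f52daaa5')
    events.pop('network-vif-plugged-f52daaa5')
    print(waiter.wait(timeout=1))                # True
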
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.186485] env[69328]: DEBUG oslo_concurrency.lockutils [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1111.186749] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] de8e6616-0460-4a6e-918c-a27818da96e2/de8e6616-0460-4a6e-918c-a27818da96e2.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1111.187032] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8070c76d-df2c-4431-b9dd-cc36528fa120 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.195243] env[69328]: DEBUG oslo_vmware.api [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 1111.195243] env[69328]: value = "task-3274086" [ 1111.195243] env[69328]: _type = "Task" [ 1111.195243] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.204145] env[69328]: DEBUG oslo_vmware.api [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274086, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.356069] env[69328]: DEBUG oslo_vmware.api [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274083, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.362386] env[69328]: DEBUG nova.compute.utils [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1111.370148] env[69328]: DEBUG nova.compute.manager [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Allocating IP information in the background. 
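
The CopyVirtualDisk_Task above copies the cached image VMDK from devstack-image-cache_base/a6ec8ae9-.../ into the new instance directory de8e6616-.../ on the same datastore, which is the point of the image cache: later instances of the same image reuse the cached VMDK instead of re-downloading it. A small illustrative helper for composing such "[datastore] dir/file.vmdk" strings; ds_path is a made-up name, not a Nova function.

    def ds_path(datastore, *parts):
        # Compose a vSphere-style datastore path such as
        # "[datastore2] devstack-image-cache_base/<image-id>/<image-id>.vmdk".
        return '[%s] %s' % (datastore, '/'.join(parts))

    image_id = 'a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318'
    instance_uuid = 'de8e6616-0460-4a6e-918c-a27818da96e2'
    src = ds_path('datastore2', 'devstack-image-cache_base', image_id, image_id + '.vmdk')
    dst = ds_path('datastore2', instance_uuid, instance_uuid + '.vmdk')
    print(src, '->', dst)
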
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1111.370416] env[69328]: DEBUG nova.network.neutron [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1111.372265] env[69328]: DEBUG oslo_vmware.api [None req-a643a3ae-873a-4480-9dda-75e2bfaccc22 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274085, 'name': ReconfigVM_Task, 'duration_secs': 0.232517} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.372444] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-a643a3ae-873a-4480-9dda-75e2bfaccc22 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653941', 'volume_id': '1afb6c74-c9b8-4214-9cd1-b9f3396261c4', 'name': 'volume-1afb6c74-c9b8-4214-9cd1-b9f3396261c4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '071c1837-9d0b-4b69-b16e-991b300385fb', 'attached_at': '', 'detached_at': '', 'volume_id': '1afb6c74-c9b8-4214-9cd1-b9f3396261c4', 'serial': '1afb6c74-c9b8-4214-9cd1-b9f3396261c4'} {{(pid=69328) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1111.429838] env[69328]: DEBUG nova.policy [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '435c64c503c043a29f90396ad3b070d9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '87581f423dc64e4fb9fe1d51ebc68597', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 1111.449577] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Acquiring lock "refresh_cache-5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1111.449577] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Acquired lock "refresh_cache-5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1111.449577] env[69328]: DEBUG nova.network.neutron [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Building network info cache for instance {{(pid=69328) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1111.597620] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bf1c2ae-daa3-4e27-8149-acda3bcb98c1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.605546] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1245aa1-92ae-489a-b5ad-f7c3ac93483f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.636362] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26e50ae5-cd92-4aba-af31-7d4096255955 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.644783] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e78d4f5-a889-416d-8788-2e6538577367 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.662446] env[69328]: DEBUG nova.compute.provider_tree [None req-b144be96-a146-47e0-a389-09f44059da41 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1111.710047] env[69328]: DEBUG oslo_vmware.api [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274086, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.768520] env[69328]: DEBUG nova.network.neutron [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Successfully created port: ce79bad7-6bfd-4645-bc55-71dfc049411d {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1111.789445] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0b4c14fe-8c75-4a2a-bf36-23d30bc0bb70 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquiring lock "76210566-12d7-4f6a-afa1-6329e87e0f85" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1111.789752] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0b4c14fe-8c75-4a2a-bf36-23d30bc0bb70 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lock "76210566-12d7-4f6a-afa1-6329e87e0f85" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1111.790080] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0b4c14fe-8c75-4a2a-bf36-23d30bc0bb70 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquiring lock "76210566-12d7-4f6a-afa1-6329e87e0f85-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1111.790300] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0b4c14fe-8c75-4a2a-bf36-23d30bc0bb70 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lock "76210566-12d7-4f6a-afa1-6329e87e0f85-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1111.790479] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0b4c14fe-8c75-4a2a-bf36-23d30bc0bb70 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lock "76210566-12d7-4f6a-afa1-6329e87e0f85-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1111.792769] env[69328]: INFO nova.compute.manager [None req-0b4c14fe-8c75-4a2a-bf36-23d30bc0bb70 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Terminating instance [ 1111.858291] env[69328]: DEBUG oslo_vmware.api [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274083, 'name': ReconfigVM_Task, 'duration_secs': 2.03207} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.858650] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1111.858962] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0ce65d5d-73ac-4b74-a2ef-ea96a605452b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.869473] env[69328]: DEBUG nova.compute.manager [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1111.872617] env[69328]: DEBUG oslo_vmware.api [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 1111.872617] env[69328]: value = "task-3274087" [ 1111.872617] env[69328]: _type = "Task" [ 1111.872617] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.886077] env[69328]: DEBUG oslo_vmware.api [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274087, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.928299] env[69328]: DEBUG nova.objects.instance [None req-a643a3ae-873a-4480-9dda-75e2bfaccc22 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lazy-loading 'flavor' on Instance uuid 071c1837-9d0b-4b69-b16e-991b300385fb {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1111.985981] env[69328]: DEBUG nova.network.neutron [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1112.138656] env[69328]: DEBUG nova.network.neutron [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Updating instance_info_cache with network_info: [{"id": "f52daaa5-48f6-4553-ac25-4a0103a7736f", "address": "fa:16:3e:a0:32:13", "network": {"id": "23570c12-86fb-49f7-8b67-c216378fd5e7", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-769391723-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e32664bc571e4ebdabcb1b4956a677fc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b107fab-ee71-47db-ad4d-3c6f05546843", "external-id": "cl2-zone-554", "segmentation_id": 554, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf52daaa5-48", "ovs_interfaceid": "f52daaa5-48f6-4553-ac25-4a0103a7736f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1112.165366] env[69328]: DEBUG nova.scheduler.client.report [None req-b144be96-a146-47e0-a389-09f44059da41 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1112.207471] env[69328]: DEBUG oslo_vmware.api [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274086, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.805106} completed successfully. 
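
The report-client entry above carries the placement inventory for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e: 48 VCPU at allocation_ratio 4.0, 196590 MB of RAM with 512 MB reserved at ratio 1.0, and 400 GB of disk at ratio 1.0. Schedulable capacity follows placement's usual (total - reserved) * allocation_ratio formula, which the short calculation below reproduces from the logged numbers.

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print('%s: %.0f units available to the scheduler' % (rc, capacity))
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
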
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.207817] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] de8e6616-0460-4a6e-918c-a27818da96e2/de8e6616-0460-4a6e-918c-a27818da96e2.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1112.209293] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1112.209293] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-92ffb318-5ee9-4903-a7f7-9e2a4a358e63 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.216320] env[69328]: DEBUG oslo_vmware.api [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 1112.216320] env[69328]: value = "task-3274088" [ 1112.216320] env[69328]: _type = "Task" [ 1112.216320] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.226340] env[69328]: DEBUG oslo_vmware.api [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274088, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.250457] env[69328]: DEBUG nova.compute.manager [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Start spawning the instance on the hypervisor. 
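
"Extending root virtual disk to 1048576" carries no unit in the message; assuming the vmwareapi driver's convention of expressing the requested size in KiB, 1048576 corresponds to exactly 1 GiB, i.e. a flavor with root_gb=1. A one-line check of that conversion:

    requested_size_kib = 1048576              # value logged by _extend_virtual_disk
    root_gib = requested_size_kib / (1024 * 1024)
    print(root_gib)                           # 1.0 -> consistent with root_gb=1
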
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1112.250995] env[69328]: DEBUG nova.virt.hardware [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1112.251233] env[69328]: DEBUG nova.virt.hardware [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1112.251374] env[69328]: DEBUG nova.virt.hardware [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1112.251557] env[69328]: DEBUG nova.virt.hardware [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1112.251744] env[69328]: DEBUG nova.virt.hardware [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1112.251890] env[69328]: DEBUG nova.virt.hardware [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1112.252117] env[69328]: DEBUG nova.virt.hardware [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1112.252278] env[69328]: DEBUG nova.virt.hardware [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1112.252441] env[69328]: DEBUG nova.virt.hardware [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Got 1 
possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1112.252604] env[69328]: DEBUG nova.virt.hardware [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1112.252964] env[69328]: DEBUG nova.virt.hardware [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1112.253685] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d16fcc30-af4d-45f9-b10f-81527ac68bcf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.261989] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f1287b4-0418-4c2a-983a-37e82419660c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.298397] env[69328]: DEBUG nova.compute.manager [None req-0b4c14fe-8c75-4a2a-bf36-23d30bc0bb70 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1112.298716] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0b4c14fe-8c75-4a2a-bf36-23d30bc0bb70 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1112.299793] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a0c64c7-96f6-4cc4-b15d-8c3e473ea281 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.308317] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b4c14fe-8c75-4a2a-bf36-23d30bc0bb70 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1112.308905] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2b6ff8b4-db74-418d-930c-a503e0f49747 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.316324] env[69328]: DEBUG oslo_vmware.api [None req-0b4c14fe-8c75-4a2a-bf36-23d30bc0bb70 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 1112.316324] env[69328]: value = "task-3274089" [ 1112.316324] env[69328]: _type = "Task" [ 1112.316324] env[69328]: } to complete. 
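
The nova.virt.hardware entries above walk the generic CPU-topology search for the m1.nano flavor: with vcpus=1 and no flavor or image limits (everything defaults to 65536), the only factorization is sockets=1, cores=1, threads=1, which is then both the possible and the preferred topology. The sketch below enumerates sockets*cores*threads factorizations of a vCPU count in the same spirit; it is a simplification for illustration, not the Nova implementation.

    import itertools

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # Yield (sockets, cores, threads) triples whose product equals the
        # vCPU count, within the given limits.
        for sockets, cores, threads in itertools.product(
                range(1, min(vcpus, max_sockets) + 1),
                range(1, min(vcpus, max_cores) + 1),
                range(1, min(vcpus, max_threads) + 1)):
            if sockets * cores * threads == vcpus:
                yield sockets, cores, threads

    print(list(possible_topologies(1)))   # [(1, 1, 1)], matching the logged result
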
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.327703] env[69328]: DEBUG oslo_vmware.api [None req-0b4c14fe-8c75-4a2a-bf36-23d30bc0bb70 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274089, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.379522] env[69328]: INFO nova.virt.block_device [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Booting with volume a2de811d-614f-4456-ac21-52535c9e5fd6 at /dev/sda [ 1112.387970] env[69328]: DEBUG oslo_vmware.api [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274087, 'name': PowerOnVM_Task, 'duration_secs': 0.453565} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.390459] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1112.393576] env[69328]: DEBUG nova.compute.manager [None req-175faa4e-b0f2-4ea5-8356-176c46c9059d tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1112.394198] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01346a8e-7708-42e8-b484-98755b0dfcba {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.424922] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-597451dc-ff2f-4f0c-b32a-669f3ed0f7be {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.438032] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e688dd14-bad9-434f-ac03-83a5e5767f6c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.479377] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-09c91def-81b4-49cf-8b4b-b91129749f2a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.488779] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffbd472d-87c6-4aff-81d3-40a82f1457ac {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.523854] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b542e6d2-c558-4513-860d-a335d9e80519 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1112.531348] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a811ccd-5694-4d9c-bf89-ddcd8d0e6e9e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.546990] env[69328]: DEBUG nova.virt.block_device [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Updating existing volume attachment record: a5b63ff8-3834-4d14-ac8b-ef4c1bf75147 {{(pid=69328) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1112.641654] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Releasing lock "refresh_cache-5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1112.642118] env[69328]: DEBUG nova.compute.manager [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Instance network_info: |[{"id": "f52daaa5-48f6-4553-ac25-4a0103a7736f", "address": "fa:16:3e:a0:32:13", "network": {"id": "23570c12-86fb-49f7-8b67-c216378fd5e7", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-769391723-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e32664bc571e4ebdabcb1b4956a677fc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b107fab-ee71-47db-ad4d-3c6f05546843", "external-id": "cl2-zone-554", "segmentation_id": 554, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf52daaa5-48", "ovs_interfaceid": "f52daaa5-48f6-4553-ac25-4a0103a7736f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1112.642704] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a0:32:13', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b107fab-ee71-47db-ad4d-3c6f05546843', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f52daaa5-48f6-4553-ac25-4a0103a7736f', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1112.650762] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Creating folder: Project (e32664bc571e4ebdabcb1b4956a677fc). 
Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1112.651095] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a7032e11-f2b9-4840-ac5e-12d21725277c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.667103] env[69328]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1112.667383] env[69328]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=69328) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1112.667649] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Folder already exists: Project (e32664bc571e4ebdabcb1b4956a677fc). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1112.667833] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Creating folder: Instances. Parent ref: group-v653929. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1112.668099] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aea2311c-f181-4863-b241-c637e2d373c2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.672430] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b144be96-a146-47e0-a389-09f44059da41 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.821s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.674592] env[69328]: DEBUG oslo_concurrency.lockutils [None req-59dd19c1-4d8c-403e-a49d-e42a386fe4a1 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.409s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1112.674842] env[69328]: DEBUG nova.objects.instance [None req-59dd19c1-4d8c-403e-a49d-e42a386fe4a1 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lazy-loading 'resources' on Instance uuid aaa9deb3-9a52-43e3-bf9b-a53922439be2 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1112.687395] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Created folder: Instances in parent group-v653929. [ 1112.687711] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
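
The Folder.CreateFolder sequence above shows that the DuplicateName SOAP fault is expected and benign here: the project folder already exists under group-v653649, so vm_util logs "Folder already exists" and moves on to creating the Instances folder. A hedged sketch of that create-or-reuse pattern follows; DuplicateNameError, FakeFolderAPI and its methods are stand-ins invented for the example, not the real oslo.vmware or Nova names.

    class DuplicateNameError(Exception):
        # Stand-in for the vSphere DuplicateName fault reported above.
        pass

    class FakeFolderAPI:
        # Minimal in-memory substitute for the real CreateFolder call.
        def __init__(self):
            self._folders = {}

        def create_folder(self, parent, name):
            key = (parent, name)
            if key in self._folders:
                raise DuplicateNameError(name)
            self._folders[key] = object()
            return self._folders[key]

        def find_folder(self, parent, name):
            return self._folders[(parent, name)]

    def ensure_folder(api, parent, name):
        # Create the folder, treating "already exists" as success.
        try:
            return api.create_folder(parent, name)
        except DuplicateNameError:
            return api.find_folder(parent, name)

    api = FakeFolderAPI()
    first = ensure_folder(api, 'group-v653649', 'Project (e32664bc571e4ebdabcb1b4956a677fc)')
    second = ensure_folder(api, 'group-v653649', 'Project (e32664bc571e4ebdabcb1b4956a677fc)')
    assert first is second
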
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1112.687952] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1112.688230] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a0769855-6aa4-4105-9d18-1d140729e91a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.704610] env[69328]: INFO nova.scheduler.client.report [None req-b144be96-a146-47e0-a389-09f44059da41 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Deleted allocations for instance 65e38a02-880b-46e2-8866-645a9fc17c7a [ 1112.713061] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1112.713061] env[69328]: value = "task-3274092" [ 1112.713061] env[69328]: _type = "Task" [ 1112.713061] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.725335] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274092, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.728148] env[69328]: DEBUG oslo_vmware.api [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274088, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084907} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.728392] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1112.729252] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73b6b481-f49b-4cbc-a55c-e10c7bc297ac {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.753631] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] de8e6616-0460-4a6e-918c-a27818da96e2/de8e6616-0460-4a6e-918c-a27818da96e2.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1112.754010] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d225d76a-f4c3-435c-8f6e-d90be3ff3645 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.774943] env[69328]: DEBUG oslo_vmware.api [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 1112.774943] env[69328]: value = "task-3274093" [ 
1112.774943] env[69328]: _type = "Task" [ 1112.774943] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.783345] env[69328]: DEBUG oslo_vmware.api [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274093, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.828521] env[69328]: DEBUG oslo_vmware.api [None req-0b4c14fe-8c75-4a2a-bf36-23d30bc0bb70 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274089, 'name': PowerOffVM_Task, 'duration_secs': 0.384075} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.829034] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b4c14fe-8c75-4a2a-bf36-23d30bc0bb70 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1112.829262] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0b4c14fe-8c75-4a2a-bf36-23d30bc0bb70 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1112.829567] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0360f79c-bb6d-45fe-a647-fddc47a57f9b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.934010] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0b4c14fe-8c75-4a2a-bf36-23d30bc0bb70 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1112.934288] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0b4c14fe-8c75-4a2a-bf36-23d30bc0bb70 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1112.934476] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b4c14fe-8c75-4a2a-bf36-23d30bc0bb70 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Deleting the datastore file [datastore2] 76210566-12d7-4f6a-afa1-6329e87e0f85 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1112.934751] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e90e612c-3320-43f2-95f2-399e6f2b3e42 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.942025] env[69328]: DEBUG oslo_vmware.api [None req-0b4c14fe-8c75-4a2a-bf36-23d30bc0bb70 tempest-AttachVolumeShelveTestJSON-1928499663 
tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 1112.942025] env[69328]: value = "task-3274095" [ 1112.942025] env[69328]: _type = "Task" [ 1112.942025] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.950940] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a643a3ae-873a-4480-9dda-75e2bfaccc22 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "071c1837-9d0b-4b69-b16e-991b300385fb" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.273s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.952009] env[69328]: DEBUG oslo_vmware.api [None req-0b4c14fe-8c75-4a2a-bf36-23d30bc0bb70 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274095, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.967875] env[69328]: DEBUG nova.compute.manager [req-751c347b-4d54-4f61-b1e0-5c94615e3622 req-1a4db06d-bcd4-43f5-9030-c35315a3bbad service nova] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Received event network-changed-f52daaa5-48f6-4553-ac25-4a0103a7736f {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1112.968161] env[69328]: DEBUG nova.compute.manager [req-751c347b-4d54-4f61-b1e0-5c94615e3622 req-1a4db06d-bcd4-43f5-9030-c35315a3bbad service nova] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Refreshing instance network info cache due to event network-changed-f52daaa5-48f6-4553-ac25-4a0103a7736f. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1112.968477] env[69328]: DEBUG oslo_concurrency.lockutils [req-751c347b-4d54-4f61-b1e0-5c94615e3622 req-1a4db06d-bcd4-43f5-9030-c35315a3bbad service nova] Acquiring lock "refresh_cache-5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1112.968559] env[69328]: DEBUG oslo_concurrency.lockutils [req-751c347b-4d54-4f61-b1e0-5c94615e3622 req-1a4db06d-bcd4-43f5-9030-c35315a3bbad service nova] Acquired lock "refresh_cache-5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1112.968726] env[69328]: DEBUG nova.network.neutron [req-751c347b-4d54-4f61-b1e0-5c94615e3622 req-1a4db06d-bcd4-43f5-9030-c35315a3bbad service nova] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Refreshing network info cache for port f52daaa5-48f6-4553-ac25-4a0103a7736f {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1113.212378] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b144be96-a146-47e0-a389-09f44059da41 tempest-ServersAdminTestJSON-766883246 tempest-ServersAdminTestJSON-766883246-project-member] Lock "65e38a02-880b-46e2-8866-645a9fc17c7a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.558s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1113.232925] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274092, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.294584] env[69328]: DEBUG oslo_vmware.api [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274093, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.424685] env[69328]: DEBUG nova.network.neutron [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Successfully updated port: ce79bad7-6bfd-4645-bc55-71dfc049411d {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1113.434936] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc20f1bf-0ffc-4a83-946d-70a8be53de52 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.443467] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd6cc2e1-ab06-41d0-afb0-beea3f7968e0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.455517] env[69328]: DEBUG oslo_vmware.api [None req-0b4c14fe-8c75-4a2a-bf36-23d30bc0bb70 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274095, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.281206} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.483813] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b4c14fe-8c75-4a2a-bf36-23d30bc0bb70 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1113.484084] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0b4c14fe-8c75-4a2a-bf36-23d30bc0bb70 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1113.484408] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0b4c14fe-8c75-4a2a-bf36-23d30bc0bb70 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1113.484504] env[69328]: INFO nova.compute.manager [None req-0b4c14fe-8c75-4a2a-bf36-23d30bc0bb70 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Took 1.19 seconds to destroy the instance on the hypervisor. 
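
The "Took 1.19 seconds to destroy the instance on the hypervisor" figure for 76210566-... is consistent with the surrounding timestamps: destruction started at 1112.298 ("Start destroying the instance on the hypervisor") and the INFO line lands at 1113.485, covering the power-off, unregister and datastore-file deletion steps in between. A quick check from the logged timestamps:

    start = 1112.298397    # "Start destroying the instance on the hypervisor"
    end = 1113.484504      # "Took 1.19 seconds to destroy the instance ..."
    print(round(end - start, 2))   # 1.19
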
[ 1113.484757] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0b4c14fe-8c75-4a2a-bf36-23d30bc0bb70 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1113.488850] env[69328]: DEBUG nova.compute.manager [-] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1113.488957] env[69328]: DEBUG nova.network.neutron [-] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1113.491095] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-831da5e3-d510-41d7-9f45-7b612773d2fb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.503832] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c02eefe-5da2-4fe7-b982-dcfb6228fa5f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.519229] env[69328]: DEBUG nova.compute.provider_tree [None req-59dd19c1-4d8c-403e-a49d-e42a386fe4a1 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1113.724152] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274092, 'name': CreateVM_Task, 'duration_secs': 0.579742} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.724337] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1113.725067] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'mount_device': '/dev/sda', 'boot_index': 0, 'delete_on_termination': True, 'disk_bus': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653934', 'volume_id': '10778dba-1e87-4365-bb28-98360345c613', 'name': 'volume-10778dba-1e87-4365-bb28-98360345c613', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732', 'attached_at': '', 'detached_at': '', 'volume_id': '10778dba-1e87-4365-bb28-98360345c613', 'serial': '10778dba-1e87-4365-bb28-98360345c613'}, 'guest_format': None, 'device_type': None, 'attachment_id': '1d3177e9-94bc-46ec-8e72-ac3f9fb71e28', 'volume_type': None}], 'swap': None} {{(pid=69328) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1113.725263] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Root volume attach. Driver type: vmdk {{(pid=69328) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1113.726117] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-090537a8-55f8-4bfe-bdbc-01b14643c0da {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.734346] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3b60ca0-d9bb-4090-b797-a54ba77f5645 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.741713] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-690a4c2d-07b4-4af6-bbae-952b2b427745 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.748272] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-919cdc42-b9b8-4e00-9251-3bc00a12e130 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.756624] env[69328]: DEBUG oslo_vmware.api [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Waiting for the task: (returnval){ [ 1113.756624] env[69328]: value = "task-3274096" [ 1113.756624] env[69328]: _type = "Task" [ 1113.756624] env[69328]: } to complete. 
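
The "Block device information present" dump above is a boot-from-volume layout: there is no image disk, root_device_name is /dev/sda, and the single block_device_mapping entry carries the vmdk connection_info for volume 10778dba-..., which is why the next step is "Root volume attach" via RelocateVM_Task rather than an image copy. A tiny illustrative parse of that structure, using the fields exactly as logged (trimmed for brevity):

    block_device_info = {
        'root_device_name': '/dev/sda',
        'image': [],
        'ephemerals': [],
        'swap': None,
        'block_device_mapping': [{
            'mount_device': '/dev/sda',
            'boot_index': 0,
            'delete_on_termination': True,
            'connection_info': {
                'driver_volume_type': 'vmdk',
                'data': {'volume': 'vm-653934',
                         'volume_id': '10778dba-1e87-4365-bb28-98360345c613'},
            },
        }],
    }

    root = block_device_info['root_device_name']
    bdm = block_device_info['block_device_mapping'][0]
    boot_from_volume = not block_device_info['image'] and bdm['mount_device'] == root
    print(boot_from_volume, bdm['connection_info']['data']['volume_id'])
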
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.760617] env[69328]: DEBUG nova.network.neutron [req-751c347b-4d54-4f61-b1e0-5c94615e3622 req-1a4db06d-bcd4-43f5-9030-c35315a3bbad service nova] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Updated VIF entry in instance network info cache for port f52daaa5-48f6-4553-ac25-4a0103a7736f. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1113.760934] env[69328]: DEBUG nova.network.neutron [req-751c347b-4d54-4f61-b1e0-5c94615e3622 req-1a4db06d-bcd4-43f5-9030-c35315a3bbad service nova] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Updating instance_info_cache with network_info: [{"id": "f52daaa5-48f6-4553-ac25-4a0103a7736f", "address": "fa:16:3e:a0:32:13", "network": {"id": "23570c12-86fb-49f7-8b67-c216378fd5e7", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-769391723-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e32664bc571e4ebdabcb1b4956a677fc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b107fab-ee71-47db-ad4d-3c6f05546843", "external-id": "cl2-zone-554", "segmentation_id": 554, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf52daaa5-48", "ovs_interfaceid": "f52daaa5-48f6-4553-ac25-4a0103a7736f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1113.770032] env[69328]: DEBUG oslo_vmware.api [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Task: {'id': task-3274096, 'name': RelocateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.786480] env[69328]: DEBUG oslo_vmware.api [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274093, 'name': ReconfigVM_Task, 'duration_secs': 0.575665} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.786480] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Reconfigured VM instance instance-0000006c to attach disk [datastore2] de8e6616-0460-4a6e-918c-a27818da96e2/de8e6616-0460-4a6e-918c-a27818da96e2.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1113.787062] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-abd173e4-f6da-463d-aad4-23057aa35912 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.793323] env[69328]: DEBUG oslo_vmware.api [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 1113.793323] env[69328]: value = "task-3274097" [ 1113.793323] env[69328]: _type = "Task" [ 1113.793323] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.802273] env[69328]: DEBUG oslo_vmware.api [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274097, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.926572] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "refresh_cache-ee3609ea-0855-47c2-874c-349c80419781" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1113.926776] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquired lock "refresh_cache-ee3609ea-0855-47c2-874c-349c80419781" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1113.926962] env[69328]: DEBUG nova.network.neutron [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1113.998118] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f35c241b-8b70-43a0-91e0-a6dde189ad31 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "071c1837-9d0b-4b69-b16e-991b300385fb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1113.998426] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f35c241b-8b70-43a0-91e0-a6dde189ad31 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock 
"071c1837-9d0b-4b69-b16e-991b300385fb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1113.998617] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f35c241b-8b70-43a0-91e0-a6dde189ad31 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "071c1837-9d0b-4b69-b16e-991b300385fb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1113.998824] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f35c241b-8b70-43a0-91e0-a6dde189ad31 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "071c1837-9d0b-4b69-b16e-991b300385fb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1113.998993] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f35c241b-8b70-43a0-91e0-a6dde189ad31 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "071c1837-9d0b-4b69-b16e-991b300385fb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1114.001812] env[69328]: INFO nova.compute.manager [None req-f35c241b-8b70-43a0-91e0-a6dde189ad31 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Terminating instance [ 1114.022724] env[69328]: DEBUG nova.scheduler.client.report [None req-59dd19c1-4d8c-403e-a49d-e42a386fe4a1 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1114.048872] env[69328]: INFO nova.compute.manager [None req-9ef580d4-4876-462e-a9fa-d8b39ad72158 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Unrescuing [ 1114.049254] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9ef580d4-4876-462e-a9fa-d8b39ad72158 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquiring lock "refresh_cache-52c87371-4142-40d6-ac68-804aabd9f823" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1114.049478] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9ef580d4-4876-462e-a9fa-d8b39ad72158 tempest-ServerRescueNegativeTestJSON-2009991415 
tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquired lock "refresh_cache-52c87371-4142-40d6-ac68-804aabd9f823" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1114.049691] env[69328]: DEBUG nova.network.neutron [None req-9ef580d4-4876-462e-a9fa-d8b39ad72158 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1114.264093] env[69328]: DEBUG oslo_concurrency.lockutils [req-751c347b-4d54-4f61-b1e0-5c94615e3622 req-1a4db06d-bcd4-43f5-9030-c35315a3bbad service nova] Releasing lock "refresh_cache-5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1114.269049] env[69328]: DEBUG oslo_vmware.api [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Task: {'id': task-3274096, 'name': RelocateVM_Task} progress is 42%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.307269] env[69328]: DEBUG oslo_vmware.api [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274097, 'name': Rename_Task, 'duration_secs': 0.169066} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.307728] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1114.307851] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c9f5ba0c-cd89-40a0-91b8-f4a8275a8c5e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.318777] env[69328]: DEBUG oslo_vmware.api [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 1114.318777] env[69328]: value = "task-3274098" [ 1114.318777] env[69328]: _type = "Task" [ 1114.318777] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.331209] env[69328]: DEBUG oslo_vmware.api [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274098, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.468363] env[69328]: DEBUG nova.network.neutron [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Instance cache missing network info. 
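The terminate path above serializes on a per-instance lock plus a separate `<uuid>-events` lock through `oslo.concurrency`. A minimal sketch of that locking pattern using only the public synchronized/lock helpers; the function names are illustrative, not Nova's actual methods:

```python
from oslo_concurrency import lockutils

INSTANCE_UUID = '071c1837-9d0b-4b69-b16e-991b300385fb'


def clear_events_for_instance():
    # Separate, short-lived lock for the instance's external-event bookkeeping.
    with lockutils.lock(INSTANCE_UUID + '-events'):
        pass  # drop any queued events here


@lockutils.synchronized(INSTANCE_UUID)
def do_terminate_instance():
    # The "acquired by ... :: waited" / "'released' ... :: held" DEBUG lines in
    # the log are emitted by this decorator's internal wrapper.
    clear_events_for_instance()


if __name__ == '__main__':
    do_terminate_instance()
```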
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1114.506394] env[69328]: DEBUG nova.compute.manager [None req-f35c241b-8b70-43a0-91e0-a6dde189ad31 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1114.506624] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f35c241b-8b70-43a0-91e0-a6dde189ad31 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1114.507783] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-817089b2-5f57-43db-a688-4f743c976b0d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.527885] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f35c241b-8b70-43a0-91e0-a6dde189ad31 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1114.528746] env[69328]: DEBUG oslo_concurrency.lockutils [None req-59dd19c1-4d8c-403e-a49d-e42a386fe4a1 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.854s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1114.530998] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-681b0545-1af6-4f35-acfe-c6975cb61201 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.537471] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 4.428s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1114.537957] env[69328]: DEBUG nova.objects.instance [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69328) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1114.557267] env[69328]: DEBUG oslo_vmware.api [None req-f35c241b-8b70-43a0-91e0-a6dde189ad31 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 1114.557267] env[69328]: value = "task-3274099" [ 1114.557267] env[69328]: _type = "Task" [ 1114.557267] env[69328]: } to complete. 
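The inventory payload reported for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e above carries `total`, `reserved` and `allocation_ratio` per resource class; usable capacity is conventionally `(total - reserved) * allocation_ratio`. Worked out for the logged numbers:

```python
# Usable capacity per resource class, computed from the inventory shown above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}


def usable(inv):
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}


print(usable(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
```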
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.565356] env[69328]: INFO nova.scheduler.client.report [None req-59dd19c1-4d8c-403e-a49d-e42a386fe4a1 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Deleted allocations for instance aaa9deb3-9a52-43e3-bf9b-a53922439be2 [ 1114.575315] env[69328]: DEBUG oslo_vmware.api [None req-f35c241b-8b70-43a0-91e0-a6dde189ad31 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274099, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.639852] env[69328]: DEBUG nova.compute.manager [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1114.640704] env[69328]: DEBUG nova.virt.hardware [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1114.641645] env[69328]: DEBUG nova.virt.hardware [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1114.641645] env[69328]: DEBUG nova.virt.hardware [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1114.641645] env[69328]: DEBUG nova.virt.hardware [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1114.641645] env[69328]: DEBUG nova.virt.hardware [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1114.642673] env[69328]: DEBUG nova.virt.hardware [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 1114.642673] env[69328]: DEBUG nova.virt.hardware [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1114.642673] env[69328]: DEBUG nova.virt.hardware [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1114.642673] env[69328]: DEBUG nova.virt.hardware [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1114.642673] env[69328]: DEBUG nova.virt.hardware [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1114.643027] env[69328]: DEBUG nova.virt.hardware [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1114.643777] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-789cd02c-0f0c-4bcc-ac9e-08064be1336c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.656929] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b505b35a-016b-4da6-9a5e-bb14c1706c90 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.772438] env[69328]: DEBUG nova.compute.manager [req-bf67945f-4c48-470f-808e-a1f6cabe84d5 req-4bb44166-33dc-4a86-b91f-6515cf4ff181 service nova] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Received event network-vif-deleted-a3cab44b-0572-4007-bab9-e84ba084f70a {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1114.773999] env[69328]: INFO nova.compute.manager [req-bf67945f-4c48-470f-808e-a1f6cabe84d5 req-4bb44166-33dc-4a86-b91f-6515cf4ff181 service nova] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Neutron deleted interface a3cab44b-0572-4007-bab9-e84ba084f70a; detaching it from the instance and deleting it from the info cache [ 1114.773999] env[69328]: DEBUG nova.network.neutron [req-bf67945f-4c48-470f-808e-a1f6cabe84d5 req-4bb44166-33dc-4a86-b91f-6515cf4ff181 service nova] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1114.777782] env[69328]: DEBUG oslo_vmware.api [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 
tempest-ServerActionsV293TestJSON-277494671-project-member] Task: {'id': task-3274096, 'name': RelocateVM_Task} progress is 54%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.783670] env[69328]: DEBUG nova.network.neutron [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Updating instance_info_cache with network_info: [{"id": "ce79bad7-6bfd-4645-bc55-71dfc049411d", "address": "fa:16:3e:c6:38:a6", "network": {"id": "ce3bec03-01f9-4ac9-80e4-bfb944c0bb66", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-545997027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87581f423dc64e4fb9fe1d51ebc68597", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce79bad7-6b", "ovs_interfaceid": "ce79bad7-6bfd-4645-bc55-71dfc049411d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1114.832089] env[69328]: DEBUG oslo_vmware.api [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274098, 'name': PowerOnVM_Task} progress is 88%. 
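The nova.virt.hardware entries further up walk from flavor/image limits (0 meaning "no preference", capped at 65536) down to the single possible 1:1:1 topology for one vCPU. A rough illustration of that enumeration step, not Nova's implementation:

```python
# Rough illustration (not Nova's code): enumerate (sockets, cores, threads)
# combinations that multiply out to the flavor's vCPU count within the maxima.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    topologies.append((sockets, cores, threads))
    return topologies


print(possible_topologies(1))   # [(1, 1, 1)]  -> the "1 possible topologies" above
print(possible_topologies(4))   # (1, 1, 4), (1, 2, 2), ..., (4, 1, 1)
```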
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.990440] env[69328]: DEBUG nova.network.neutron [None req-9ef580d4-4876-462e-a9fa-d8b39ad72158 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Updating instance_info_cache with network_info: [{"id": "7da3de27-ee87-400f-ae26-a3a6995a8363", "address": "fa:16:3e:91:9b:b5", "network": {"id": "bd41ac10-1850-45a2-8e46-6ebdca3d8e13", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-766393176-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.150", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efd0e2d2f9ba4416bd8fd08dad912465", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7da3de27-ee", "ovs_interfaceid": "7da3de27-ee87-400f-ae26-a3a6995a8363", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1115.073247] env[69328]: DEBUG oslo_vmware.api [None req-f35c241b-8b70-43a0-91e0-a6dde189ad31 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274099, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.079486] env[69328]: DEBUG oslo_concurrency.lockutils [None req-59dd19c1-4d8c-403e-a49d-e42a386fe4a1 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "aaa9deb3-9a52-43e3-bf9b-a53922439be2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.338s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1115.154228] env[69328]: DEBUG nova.compute.manager [req-4fa5dc9f-691d-4a3c-8888-fd811e08ac7c req-e91a7169-56c0-40e5-a5f6-8323e714c571 service nova] [instance: ee3609ea-0855-47c2-874c-349c80419781] Received event network-vif-plugged-ce79bad7-6bfd-4645-bc55-71dfc049411d {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1115.154450] env[69328]: DEBUG oslo_concurrency.lockutils [req-4fa5dc9f-691d-4a3c-8888-fd811e08ac7c req-e91a7169-56c0-40e5-a5f6-8323e714c571 service nova] Acquiring lock "ee3609ea-0855-47c2-874c-349c80419781-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1115.154672] env[69328]: DEBUG oslo_concurrency.lockutils [req-4fa5dc9f-691d-4a3c-8888-fd811e08ac7c req-e91a7169-56c0-40e5-a5f6-8323e714c571 service nova] Lock "ee3609ea-0855-47c2-874c-349c80419781-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1115.154885] env[69328]: DEBUG oslo_concurrency.lockutils [req-4fa5dc9f-691d-4a3c-8888-fd811e08ac7c req-e91a7169-56c0-40e5-a5f6-8323e714c571 service nova] Lock "ee3609ea-0855-47c2-874c-349c80419781-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1115.155192] env[69328]: DEBUG nova.compute.manager [req-4fa5dc9f-691d-4a3c-8888-fd811e08ac7c req-e91a7169-56c0-40e5-a5f6-8323e714c571 service nova] [instance: ee3609ea-0855-47c2-874c-349c80419781] No waiting events found dispatching network-vif-plugged-ce79bad7-6bfd-4645-bc55-71dfc049411d {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1115.155379] env[69328]: WARNING nova.compute.manager [req-4fa5dc9f-691d-4a3c-8888-fd811e08ac7c req-e91a7169-56c0-40e5-a5f6-8323e714c571 service nova] [instance: ee3609ea-0855-47c2-874c-349c80419781] Received unexpected event network-vif-plugged-ce79bad7-6bfd-4645-bc55-71dfc049411d for instance with vm_state building and task_state spawning. [ 1115.155699] env[69328]: DEBUG nova.compute.manager [req-4fa5dc9f-691d-4a3c-8888-fd811e08ac7c req-e91a7169-56c0-40e5-a5f6-8323e714c571 service nova] [instance: ee3609ea-0855-47c2-874c-349c80419781] Received event network-changed-ce79bad7-6bfd-4645-bc55-71dfc049411d {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1115.155699] env[69328]: DEBUG nova.compute.manager [req-4fa5dc9f-691d-4a3c-8888-fd811e08ac7c req-e91a7169-56c0-40e5-a5f6-8323e714c571 service nova] [instance: ee3609ea-0855-47c2-874c-349c80419781] Refreshing instance network info cache due to event network-changed-ce79bad7-6bfd-4645-bc55-71dfc049411d. 
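The req-4fa5dc9f entries above show Neutron's network-vif-plugged / network-changed events being dispatched to the compute manager, which pops any registered waiter or warns when nothing is waiting. A hedged illustration of that prepare/pop idea using plain threading primitives; this is not Nova's InstanceEvents class:

```python
import threading


class EventWaiters:
    """Illustrative waiter registry keyed by (instance_uuid, event_key)."""

    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}   # (instance_uuid, event_key) -> threading.Event

    def prepare(self, instance_uuid, event_key):
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_key)] = ev
        return ev

    def pop_and_signal(self, instance_uuid, event_key):
        with self._lock:
            ev = self._waiters.pop((instance_uuid, event_key), None)
        if ev is None:
            return False   # analogue of "No waiting events found dispatching ..."
        ev.set()
        return True


waiters = EventWaiters()
uuid = 'ee3609ea-0855-47c2-874c-349c80419781'
key = 'network-vif-plugged-ce79bad7-6bfd-4645-bc55-71dfc049411d'
ready = waiters.prepare(uuid, key)
waiters.pop_and_signal(uuid, key)   # delivered by the external-event handler
ready.wait(timeout=1)
```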
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1115.155865] env[69328]: DEBUG oslo_concurrency.lockutils [req-4fa5dc9f-691d-4a3c-8888-fd811e08ac7c req-e91a7169-56c0-40e5-a5f6-8323e714c571 service nova] Acquiring lock "refresh_cache-ee3609ea-0855-47c2-874c-349c80419781" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1115.196684] env[69328]: DEBUG nova.network.neutron [-] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1115.271292] env[69328]: DEBUG oslo_vmware.api [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Task: {'id': task-3274096, 'name': RelocateVM_Task} progress is 67%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.279623] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8c83635f-275d-4b66-9ed1-fc811cc47fbe {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.286953] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Releasing lock "refresh_cache-ee3609ea-0855-47c2-874c-349c80419781" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1115.287287] env[69328]: DEBUG nova.compute.manager [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Instance network_info: |[{"id": "ce79bad7-6bfd-4645-bc55-71dfc049411d", "address": "fa:16:3e:c6:38:a6", "network": {"id": "ce3bec03-01f9-4ac9-80e4-bfb944c0bb66", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-545997027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87581f423dc64e4fb9fe1d51ebc68597", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce79bad7-6b", "ovs_interfaceid": "ce79bad7-6bfd-4645-bc55-71dfc049411d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1115.288305] env[69328]: DEBUG oslo_concurrency.lockutils [req-4fa5dc9f-691d-4a3c-8888-fd811e08ac7c req-e91a7169-56c0-40e5-a5f6-8323e714c571 service nova] Acquired lock "refresh_cache-ee3609ea-0855-47c2-874c-349c80419781" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 
1115.288723] env[69328]: DEBUG nova.network.neutron [req-4fa5dc9f-691d-4a3c-8888-fd811e08ac7c req-e91a7169-56c0-40e5-a5f6-8323e714c571 service nova] [instance: ee3609ea-0855-47c2-874c-349c80419781] Refreshing network info cache for port ce79bad7-6bfd-4645-bc55-71dfc049411d {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1115.290096] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c6:38:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1768af3d-3317-4ef5-b484-0c2707d63de7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ce79bad7-6bfd-4645-bc55-71dfc049411d', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1115.299032] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1115.301074] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee3609ea-0855-47c2-874c-349c80419781] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1115.301391] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e696248a-ee70-4154-8911-84378eae8207 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.320918] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eac3f731-95b7-4b4a-9984-593638d03cbc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.340737] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1115.340737] env[69328]: value = "task-3274100" [ 1115.340737] env[69328]: _type = "Task" [ 1115.340737] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.345099] env[69328]: DEBUG oslo_vmware.api [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274098, 'name': PowerOnVM_Task, 'duration_secs': 0.810282} completed successfully. 
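The spawn path above reduces the cached network_info entry to the compact VIF info the VMware driver needs: port id, MAC, the bridge name, the NSX logical-switch id as an opaque network reference, and a vmxnet3 model. A small sketch of that extraction over the JSON shape shown in the log; the helper itself is illustrative:

```python
# Illustrative reduction of one cached network_info entry (shape as logged
# above) to the minimal VIF description the spawn path builds.
def vif_info_from_network_info(entry):
    details = entry.get('details', {})
    return {
        'iface_id': entry['id'],
        'mac_address': entry['address'],
        'network_name': entry['network']['bridge'],
        'network_ref': {
            'type': 'OpaqueNetwork',
            'network-id': details.get('nsx-logical-switch-id'),
            'network-type': 'nsx.LogicalSwitch',
            'use-external-id': True,
        },
        'vif_model': 'vmxnet3',
    }


entry = {
    'id': 'ce79bad7-6bfd-4645-bc55-71dfc049411d',
    'address': 'fa:16:3e:c6:38:a6',
    'network': {'bridge': 'br-int'},
    'details': {'nsx-logical-switch-id': '1768af3d-3317-4ef5-b484-0c2707d63de7'},
}
print(vif_info_from_network_info(entry))
```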
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.348616] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1115.348849] env[69328]: INFO nova.compute.manager [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Took 9.07 seconds to spawn the instance on the hypervisor. [ 1115.349043] env[69328]: DEBUG nova.compute.manager [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1115.349912] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8416a83-2550-4446-8afc-698c99a80b8b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.366964] env[69328]: DEBUG nova.compute.manager [req-bf67945f-4c48-470f-808e-a1f6cabe84d5 req-4bb44166-33dc-4a86-b91f-6515cf4ff181 service nova] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Detach interface failed, port_id=a3cab44b-0572-4007-bab9-e84ba084f70a, reason: Instance 76210566-12d7-4f6a-afa1-6329e87e0f85 could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1115.374019] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274100, 'name': CreateVM_Task} progress is 10%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.496961] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9ef580d4-4876-462e-a9fa-d8b39ad72158 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Releasing lock "refresh_cache-52c87371-4142-40d6-ac68-804aabd9f823" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1115.496961] env[69328]: DEBUG nova.objects.instance [None req-9ef580d4-4876-462e-a9fa-d8b39ad72158 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lazy-loading 'flavor' on Instance uuid 52c87371-4142-40d6-ac68-804aabd9f823 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1115.543915] env[69328]: DEBUG oslo_concurrency.lockutils [None req-265b401f-559d-40e3-bb74-e1b4d81ad73a tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Acquiring lock "275ef1ed-8e60-4151-b548-e22e5bd8efe2" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1115.544269] env[69328]: DEBUG oslo_concurrency.lockutils [None req-265b401f-559d-40e3-bb74-e1b4d81ad73a tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Lock "275ef1ed-8e60-4151-b548-e22e5bd8efe2" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1115.550732] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e77e2990-a51d-4b80-863e-bab2a9fe99ed tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1115.573550] env[69328]: DEBUG oslo_vmware.api [None req-f35c241b-8b70-43a0-91e0-a6dde189ad31 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274099, 'name': PowerOffVM_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.700785] env[69328]: INFO nova.compute.manager [-] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Took 2.21 seconds to deallocate network for instance. [ 1115.775750] env[69328]: DEBUG oslo_vmware.api [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Task: {'id': task-3274096, 'name': RelocateVM_Task} progress is 78%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.865651] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274100, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.892180] env[69328]: INFO nova.compute.manager [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Took 14.48 seconds to build instance. [ 1116.002475] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67d3709f-dd35-4ec7-862a-9b672a0e3316 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.032622] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ef580d4-4876-462e-a9fa-d8b39ad72158 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1116.036195] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-53b51f57-6d2d-4ae5-9f8e-e26bc5a02087 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.048235] env[69328]: DEBUG nova.compute.utils [None req-265b401f-559d-40e3-bb74-e1b4d81ad73a tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1116.049610] env[69328]: DEBUG oslo_vmware.api [None req-9ef580d4-4876-462e-a9fa-d8b39ad72158 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 1116.049610] env[69328]: value = "task-3274101" [ 1116.049610] env[69328]: _type = "Task" [ 1116.049610] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.063266] env[69328]: DEBUG oslo_vmware.api [None req-9ef580d4-4876-462e-a9fa-d8b39ad72158 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274101, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.074747] env[69328]: DEBUG oslo_vmware.api [None req-f35c241b-8b70-43a0-91e0-a6dde189ad31 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274099, 'name': PowerOffVM_Task, 'duration_secs': 1.161339} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.075091] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f35c241b-8b70-43a0-91e0-a6dde189ad31 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1116.075282] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f35c241b-8b70-43a0-91e0-a6dde189ad31 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1116.075586] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6a73a0b0-01b0-4b13-919b-0846a60343e6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.208998] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0b4c14fe-8c75-4a2a-bf36-23d30bc0bb70 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1116.209435] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0b4c14fe-8c75-4a2a-bf36-23d30bc0bb70 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1116.209824] env[69328]: DEBUG nova.objects.instance [None req-0b4c14fe-8c75-4a2a-bf36-23d30bc0bb70 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lazy-loading 'resources' on Instance uuid 76210566-12d7-4f6a-afa1-6329e87e0f85 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1116.229500] env[69328]: DEBUG nova.network.neutron [req-4fa5dc9f-691d-4a3c-8888-fd811e08ac7c req-e91a7169-56c0-40e5-a5f6-8323e714c571 service nova] [instance: ee3609ea-0855-47c2-874c-349c80419781] Updated VIF entry in instance network info cache for port ce79bad7-6bfd-4645-bc55-71dfc049411d. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1116.229871] env[69328]: DEBUG nova.network.neutron [req-4fa5dc9f-691d-4a3c-8888-fd811e08ac7c req-e91a7169-56c0-40e5-a5f6-8323e714c571 service nova] [instance: ee3609ea-0855-47c2-874c-349c80419781] Updating instance_info_cache with network_info: [{"id": "ce79bad7-6bfd-4645-bc55-71dfc049411d", "address": "fa:16:3e:c6:38:a6", "network": {"id": "ce3bec03-01f9-4ac9-80e4-bfb944c0bb66", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-545997027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87581f423dc64e4fb9fe1d51ebc68597", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce79bad7-6b", "ovs_interfaceid": "ce79bad7-6bfd-4645-bc55-71dfc049411d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1116.253182] env[69328]: DEBUG oslo_concurrency.lockutils [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "32b9acbc-35a0-4d67-ac74-ef46c45fa0b9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1116.254462] env[69328]: DEBUG oslo_concurrency.lockutils [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "32b9acbc-35a0-4d67-ac74-ef46c45fa0b9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1116.273928] env[69328]: DEBUG oslo_vmware.api [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Task: {'id': task-3274096, 'name': RelocateVM_Task} progress is 92%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.335306] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f35c241b-8b70-43a0-91e0-a6dde189ad31 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1116.335306] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f35c241b-8b70-43a0-91e0-a6dde189ad31 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1116.335306] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-f35c241b-8b70-43a0-91e0-a6dde189ad31 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Deleting the datastore file [datastore1] 071c1837-9d0b-4b69-b16e-991b300385fb {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1116.335306] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7896ccf7-8791-4502-84b0-c06c5ff4205d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.345174] env[69328]: DEBUG oslo_vmware.api [None req-f35c241b-8b70-43a0-91e0-a6dde189ad31 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 1116.345174] env[69328]: value = "task-3274103" [ 1116.345174] env[69328]: _type = "Task" [ 1116.345174] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.357037] env[69328]: DEBUG oslo_vmware.api [None req-f35c241b-8b70-43a0-91e0-a6dde189ad31 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274103, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.360116] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274100, 'name': CreateVM_Task} progress is 99%. 
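The terminate sequence for 071c1837-9d0b-4b69-b16e-991b300385fb runs in a fixed order above: power off the VM, unregister it, then delete its datastore directory, with each long-running step waiting on its own vCenter task. A hedged outline of that ordering with hypothetical stand-ins for the driver's task submissions:

```python
# Hedged outline of the destroy ordering above; FakeVM and the lambda are
# hypothetical stand-ins for the driver's vCenter task submissions.
class FakeVM:
    def power_off(self):
        return 'PowerOffVM_Task'

    def unregister(self):
        print('unregistered')          # UnregisterVM is a plain call, no task to poll

    def delete_datastore_dir(self):
        return 'DeleteDatastoreFile_Task'


def destroy_instance(vm, wait_for_task):
    # Order matters: files can only be removed once the VM is powered off
    # and gone from the vCenter inventory.
    wait_for_task(vm.power_off())
    vm.unregister()
    wait_for_task(vm.delete_datastore_dir())


destroy_instance(FakeVM(), wait_for_task=lambda task: print('waited on', task))
```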
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.395593] env[69328]: DEBUG oslo_concurrency.lockutils [None req-970f31b5-cc34-4796-ad10-7697a695388d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "de8e6616-0460-4a6e-918c-a27818da96e2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.992s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1116.551647] env[69328]: DEBUG oslo_concurrency.lockutils [None req-265b401f-559d-40e3-bb74-e1b4d81ad73a tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Lock "275ef1ed-8e60-4151-b548-e22e5bd8efe2" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1116.568369] env[69328]: DEBUG oslo_vmware.api [None req-9ef580d4-4876-462e-a9fa-d8b39ad72158 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274101, 'name': PowerOffVM_Task, 'duration_secs': 0.44161} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.568716] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ef580d4-4876-462e-a9fa-d8b39ad72158 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1116.574020] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ef580d4-4876-462e-a9fa-d8b39ad72158 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Reconfiguring VM instance instance-00000056 to detach disk 2002 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1116.574333] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6898cf08-6ed7-4433-bdba-39d62215c77a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.594084] env[69328]: DEBUG oslo_vmware.api [None req-9ef580d4-4876-462e-a9fa-d8b39ad72158 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 1116.594084] env[69328]: value = "task-3274104" [ 1116.594084] env[69328]: _type = "Task" [ 1116.594084] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.602932] env[69328]: DEBUG oslo_vmware.api [None req-9ef580d4-4876-462e-a9fa-d8b39ad72158 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274104, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.733630] env[69328]: DEBUG oslo_concurrency.lockutils [req-4fa5dc9f-691d-4a3c-8888-fd811e08ac7c req-e91a7169-56c0-40e5-a5f6-8323e714c571 service nova] Releasing lock "refresh_cache-ee3609ea-0855-47c2-874c-349c80419781" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1116.757685] env[69328]: DEBUG nova.compute.manager [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1116.772858] env[69328]: DEBUG oslo_vmware.api [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Task: {'id': task-3274096, 'name': RelocateVM_Task} progress is 97%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.859749] env[69328]: DEBUG oslo_vmware.api [None req-f35c241b-8b70-43a0-91e0-a6dde189ad31 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274103, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.865443] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274100, 'name': CreateVM_Task, 'duration_secs': 1.060857} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.865807] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee3609ea-0855-47c2-874c-349c80419781] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1116.866621] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'mount_device': '/dev/sda', 'boot_index': 0, 'delete_on_termination': True, 'disk_bus': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653933', 'volume_id': 'a2de811d-614f-4456-ac21-52535c9e5fd6', 'name': 'volume-a2de811d-614f-4456-ac21-52535c9e5fd6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ee3609ea-0855-47c2-874c-349c80419781', 'attached_at': '', 'detached_at': '', 'volume_id': 'a2de811d-614f-4456-ac21-52535c9e5fd6', 'serial': 'a2de811d-614f-4456-ac21-52535c9e5fd6'}, 'guest_format': None, 'device_type': None, 'attachment_id': 'a5b63ff8-3834-4d14-ac8b-ef4c1bf75147', 'volume_type': None}], 'swap': None} {{(pid=69328) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1116.866759] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Root volume attach. 
Driver type: vmdk {{(pid=69328) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1116.867525] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82c50e27-06e7-46cc-9557-594f5db90e11 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.875575] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f57a607-b3ac-4763-b5c3-e37b2bf9c96b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.886512] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-067b5268-6a26-47bd-84a5-0f03dafacf8a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.897698] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-ea0b5575-cfb3-4fa7-950d-a64b9957cbb3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.905831] env[69328]: DEBUG oslo_vmware.api [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 1116.905831] env[69328]: value = "task-3274105" [ 1116.905831] env[69328]: _type = "Task" [ 1116.905831] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.919045] env[69328]: DEBUG oslo_vmware.api [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274105, 'name': RelocateVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.004348] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1e001ea-697f-4332-98bc-182fabcd1dc7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.013869] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae7d4d77-ef6e-47be-830e-f23782f73060 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.068123] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b232102e-572b-4302-b7c5-b5713511e50f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.080786] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27ddc11a-325f-49ab-a48c-eab8ae554e08 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.109080] env[69328]: DEBUG nova.compute.provider_tree [None req-0b4c14fe-8c75-4a2a-bf36-23d30bc0bb70 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1117.125692] env[69328]: DEBUG oslo_vmware.api [None req-9ef580d4-4876-462e-a9fa-d8b39ad72158 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274104, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.276479] env[69328]: DEBUG oslo_vmware.api [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Task: {'id': task-3274096, 'name': RelocateVM_Task} progress is 97%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.288104] env[69328]: DEBUG oslo_concurrency.lockutils [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1117.359127] env[69328]: DEBUG oslo_vmware.api [None req-f35c241b-8b70-43a0-91e0-a6dde189ad31 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274103, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.547354} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.359343] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-f35c241b-8b70-43a0-91e0-a6dde189ad31 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1117.359562] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f35c241b-8b70-43a0-91e0-a6dde189ad31 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1117.359747] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f35c241b-8b70-43a0-91e0-a6dde189ad31 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1117.359925] env[69328]: INFO nova.compute.manager [None req-f35c241b-8b70-43a0-91e0-a6dde189ad31 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Took 2.85 seconds to destroy the instance on the hypervisor. [ 1117.360239] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f35c241b-8b70-43a0-91e0-a6dde189ad31 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1117.360462] env[69328]: DEBUG nova.compute.manager [-] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1117.360560] env[69328]: DEBUG nova.network.neutron [-] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1117.419669] env[69328]: DEBUG oslo_vmware.api [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274105, 'name': RelocateVM_Task} progress is 38%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.464574] env[69328]: DEBUG nova.compute.manager [req-2627bfeb-3397-4adc-8694-bb2d00c9b82e req-e60825bf-e6a2-4818-b21d-13902f8b8b49 service nova] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Received event network-changed-13436ecc-0cb3-4c13-bf18-f81195196ffd {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1117.464775] env[69328]: DEBUG nova.compute.manager [req-2627bfeb-3397-4adc-8694-bb2d00c9b82e req-e60825bf-e6a2-4818-b21d-13902f8b8b49 service nova] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Refreshing instance network info cache due to event network-changed-13436ecc-0cb3-4c13-bf18-f81195196ffd. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1117.465058] env[69328]: DEBUG oslo_concurrency.lockutils [req-2627bfeb-3397-4adc-8694-bb2d00c9b82e req-e60825bf-e6a2-4818-b21d-13902f8b8b49 service nova] Acquiring lock "refresh_cache-de8e6616-0460-4a6e-918c-a27818da96e2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1117.465183] env[69328]: DEBUG oslo_concurrency.lockutils [req-2627bfeb-3397-4adc-8694-bb2d00c9b82e req-e60825bf-e6a2-4818-b21d-13902f8b8b49 service nova] Acquired lock "refresh_cache-de8e6616-0460-4a6e-918c-a27818da96e2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1117.465322] env[69328]: DEBUG nova.network.neutron [req-2627bfeb-3397-4adc-8694-bb2d00c9b82e req-e60825bf-e6a2-4818-b21d-13902f8b8b49 service nova] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Refreshing network info cache for port 13436ecc-0cb3-4c13-bf18-f81195196ffd {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1117.618608] env[69328]: DEBUG nova.scheduler.client.report [None req-0b4c14fe-8c75-4a2a-bf36-23d30bc0bb70 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1117.626440] env[69328]: DEBUG oslo_vmware.api [None req-9ef580d4-4876-462e-a9fa-d8b39ad72158 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274104, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.659754] env[69328]: DEBUG oslo_concurrency.lockutils [None req-265b401f-559d-40e3-bb74-e1b4d81ad73a tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Acquiring lock "275ef1ed-8e60-4151-b548-e22e5bd8efe2" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1117.660489] env[69328]: DEBUG oslo_concurrency.lockutils [None req-265b401f-559d-40e3-bb74-e1b4d81ad73a tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Lock "275ef1ed-8e60-4151-b548-e22e5bd8efe2" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1117.661191] env[69328]: INFO nova.compute.manager [None req-265b401f-559d-40e3-bb74-e1b4d81ad73a tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Attaching volume fef2b051-1a59-44f4-891c-513cd1a55f11 to /dev/sdb [ 1117.707492] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33b9eade-bdfb-4a04-b0bc-c76f2140b0f0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.720620] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b95178f2-2310-4c00-9423-2c301873c3b7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.742816] env[69328]: DEBUG nova.virt.block_device [None req-265b401f-559d-40e3-bb74-e1b4d81ad73a tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Updating existing volume attachment record: a731e876-c8cd-4541-aca2-f9de6be4d036 {{(pid=69328) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1117.774428] env[69328]: DEBUG oslo_vmware.api [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Task: {'id': task-3274096, 'name': RelocateVM_Task} progress is 98%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.919858] env[69328]: DEBUG oslo_vmware.api [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274105, 'name': RelocateVM_Task} progress is 51%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.125559] env[69328]: DEBUG oslo_vmware.api [None req-9ef580d4-4876-462e-a9fa-d8b39ad72158 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274104, 'name': ReconfigVM_Task, 'duration_secs': 1.281196} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.126164] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ef580d4-4876-462e-a9fa-d8b39ad72158 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Reconfigured VM instance instance-00000056 to detach disk 2002 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1118.126288] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ef580d4-4876-462e-a9fa-d8b39ad72158 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1118.128117] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c4fa406c-4555-4570-9816-09240505fbe6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.129221] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0b4c14fe-8c75-4a2a-bf36-23d30bc0bb70 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.920s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1118.133469] env[69328]: DEBUG oslo_concurrency.lockutils [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.844s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1118.136864] env[69328]: INFO nova.compute.claims [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1118.150688] env[69328]: DEBUG oslo_vmware.api [None req-9ef580d4-4876-462e-a9fa-d8b39ad72158 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 1118.150688] env[69328]: value = "task-3274109" [ 1118.150688] env[69328]: _type = "Task" [ 1118.150688] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.155691] env[69328]: INFO nova.scheduler.client.report [None req-0b4c14fe-8c75-4a2a-bf36-23d30bc0bb70 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Deleted allocations for instance 76210566-12d7-4f6a-afa1-6329e87e0f85 [ 1118.164407] env[69328]: DEBUG oslo_vmware.api [None req-9ef580d4-4876-462e-a9fa-d8b39ad72158 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274109, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.281477] env[69328]: DEBUG oslo_vmware.api [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Task: {'id': task-3274096, 'name': RelocateVM_Task} progress is 98%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.426249] env[69328]: DEBUG oslo_vmware.api [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274105, 'name': RelocateVM_Task} progress is 63%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.594404] env[69328]: DEBUG nova.compute.manager [req-790b523d-cdd4-4dad-8a2d-bdb783981ec1 req-423b07ef-6e8d-4a9f-b031-18a255b724ef service nova] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Received event network-vif-deleted-61188e10-aa7e-4ec8-99f4-bc6a8380b3be {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1118.595898] env[69328]: INFO nova.compute.manager [req-790b523d-cdd4-4dad-8a2d-bdb783981ec1 req-423b07ef-6e8d-4a9f-b031-18a255b724ef service nova] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Neutron deleted interface 61188e10-aa7e-4ec8-99f4-bc6a8380b3be; detaching it from the instance and deleting it from the info cache [ 1118.595898] env[69328]: DEBUG nova.network.neutron [req-790b523d-cdd4-4dad-8a2d-bdb783981ec1 req-423b07ef-6e8d-4a9f-b031-18a255b724ef service nova] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1118.659060] env[69328]: DEBUG nova.network.neutron [req-2627bfeb-3397-4adc-8694-bb2d00c9b82e req-e60825bf-e6a2-4818-b21d-13902f8b8b49 service nova] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Updated VIF entry in instance network info cache for port 13436ecc-0cb3-4c13-bf18-f81195196ffd. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1118.659832] env[69328]: DEBUG nova.network.neutron [req-2627bfeb-3397-4adc-8694-bb2d00c9b82e req-e60825bf-e6a2-4818-b21d-13902f8b8b49 service nova] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Updating instance_info_cache with network_info: [{"id": "13436ecc-0cb3-4c13-bf18-f81195196ffd", "address": "fa:16:3e:2e:1b:14", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13436ecc-0c", "ovs_interfaceid": "13436ecc-0cb3-4c13-bf18-f81195196ffd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1118.674395] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "33583ef3-252c-45d4-a514-5646f98c5f45" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1118.674802] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "33583ef3-252c-45d4-a514-5646f98c5f45" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1118.676271] env[69328]: DEBUG oslo_vmware.api [None req-9ef580d4-4876-462e-a9fa-d8b39ad72158 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274109, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.677359] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0b4c14fe-8c75-4a2a-bf36-23d30bc0bb70 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lock "76210566-12d7-4f6a-afa1-6329e87e0f85" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.888s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1118.777038] env[69328]: DEBUG oslo_vmware.api [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Task: {'id': task-3274096, 'name': RelocateVM_Task, 'duration_secs': 4.578552} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.777235] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Volume attach. Driver type: vmdk {{(pid=69328) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1118.777434] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653934', 'volume_id': '10778dba-1e87-4365-bb28-98360345c613', 'name': 'volume-10778dba-1e87-4365-bb28-98360345c613', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732', 'attached_at': '', 'detached_at': '', 'volume_id': '10778dba-1e87-4365-bb28-98360345c613', 'serial': '10778dba-1e87-4365-bb28-98360345c613'} {{(pid=69328) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1118.778352] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e6557b5-e2c8-4559-a138-10d059e0fffd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.798278] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1191142-bfbb-4c53-bc50-7052e5cb7d0f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.825126] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Reconfiguring VM instance instance-0000006d to attach disk [datastore2] volume-10778dba-1e87-4365-bb28-98360345c613/volume-10778dba-1e87-4365-bb28-98360345c613.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1118.825521] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9f3aa9b0-3da4-4e0b-9f02-92487c30b943 {{(pid=69328) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.852539] env[69328]: DEBUG oslo_vmware.api [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Waiting for the task: (returnval){ [ 1118.852539] env[69328]: value = "task-3274110" [ 1118.852539] env[69328]: _type = "Task" [ 1118.852539] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.863354] env[69328]: DEBUG oslo_vmware.api [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Task: {'id': task-3274110, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.922418] env[69328]: DEBUG oslo_vmware.api [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274105, 'name': RelocateVM_Task} progress is 76%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.990162] env[69328]: DEBUG nova.network.neutron [-] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1119.098730] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0aedc387-27a0-43e6-8805-a108db966c09 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.111091] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-930e7c8e-6d01-4ecb-9303-30b9fc97f15c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.155198] env[69328]: DEBUG nova.compute.manager [req-790b523d-cdd4-4dad-8a2d-bdb783981ec1 req-423b07ef-6e8d-4a9f-b031-18a255b724ef service nova] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Detach interface failed, port_id=61188e10-aa7e-4ec8-99f4-bc6a8380b3be, reason: Instance 071c1837-9d0b-4b69-b16e-991b300385fb could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1119.169244] env[69328]: DEBUG oslo_vmware.api [None req-9ef580d4-4876-462e-a9fa-d8b39ad72158 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274109, 'name': PowerOnVM_Task, 'duration_secs': 0.909406} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.169599] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ef580d4-4876-462e-a9fa-d8b39ad72158 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1119.169879] env[69328]: DEBUG nova.compute.manager [None req-9ef580d4-4876-462e-a9fa-d8b39ad72158 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1119.170796] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeb6ce2e-2c0d-4028-9078-fbb7f98d49dd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.173862] env[69328]: DEBUG oslo_concurrency.lockutils [req-2627bfeb-3397-4adc-8694-bb2d00c9b82e req-e60825bf-e6a2-4818-b21d-13902f8b8b49 service nova] Releasing lock "refresh_cache-de8e6616-0460-4a6e-918c-a27818da96e2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1119.180065] env[69328]: DEBUG nova.compute.manager [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1119.367811] env[69328]: DEBUG oslo_vmware.api [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Task: {'id': task-3274110, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.395710] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8417446a-b59a-4143-8f6a-0de1b51b7884 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.406741] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12e0f8d9-a28c-4b14-a1c3-d54f0ee5e6bb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.423256] env[69328]: DEBUG oslo_vmware.api [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274105, 'name': RelocateVM_Task} progress is 89%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.451124] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aece7aea-fb60-4b88-bfc9-b970bbf54d06 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.465981] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c5741d9-d0be-40fa-a322-aa8578ed97ed {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.485221] env[69328]: DEBUG nova.compute.provider_tree [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1119.493070] env[69328]: INFO nova.compute.manager [-] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Took 2.13 seconds to deallocate network for instance. [ 1119.717150] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1119.869036] env[69328]: DEBUG oslo_vmware.api [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Task: {'id': task-3274110, 'name': ReconfigVM_Task, 'duration_secs': 0.764744} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.869036] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Reconfigured VM instance instance-0000006d to attach disk [datastore2] volume-10778dba-1e87-4365-bb28-98360345c613/volume-10778dba-1e87-4365-bb28-98360345c613.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1119.874350] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a5d5388c-d1b1-4a68-9256-3e32354f4fad {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.889912] env[69328]: DEBUG oslo_vmware.api [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Waiting for the task: (returnval){ [ 1119.889912] env[69328]: value = "task-3274111" [ 1119.889912] env[69328]: _type = "Task" [ 1119.889912] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.899344] env[69328]: DEBUG oslo_vmware.api [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Task: {'id': task-3274111, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.919896] env[69328]: DEBUG oslo_vmware.api [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274105, 'name': RelocateVM_Task} progress is 97%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.988707] env[69328]: DEBUG nova.scheduler.client.report [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1120.005183] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f35c241b-8b70-43a0-91e0-a6dde189ad31 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1120.216642] env[69328]: DEBUG nova.compute.manager [req-a386c140-63a5-45a0-a877-9aff78bcdeba req-ab1f8db8-9e67-404d-a985-68c5bdbecc2e service nova] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Received event network-changed-7da3de27-ee87-400f-ae26-a3a6995a8363 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1120.216881] env[69328]: DEBUG nova.compute.manager [req-a386c140-63a5-45a0-a877-9aff78bcdeba req-ab1f8db8-9e67-404d-a985-68c5bdbecc2e service nova] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Refreshing instance network info cache due to event network-changed-7da3de27-ee87-400f-ae26-a3a6995a8363. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1120.217209] env[69328]: DEBUG oslo_concurrency.lockutils [req-a386c140-63a5-45a0-a877-9aff78bcdeba req-ab1f8db8-9e67-404d-a985-68c5bdbecc2e service nova] Acquiring lock "refresh_cache-52c87371-4142-40d6-ac68-804aabd9f823" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1120.218154] env[69328]: DEBUG oslo_concurrency.lockutils [req-a386c140-63a5-45a0-a877-9aff78bcdeba req-ab1f8db8-9e67-404d-a985-68c5bdbecc2e service nova] Acquired lock "refresh_cache-52c87371-4142-40d6-ac68-804aabd9f823" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1120.218154] env[69328]: DEBUG nova.network.neutron [req-a386c140-63a5-45a0-a877-9aff78bcdeba req-ab1f8db8-9e67-404d-a985-68c5bdbecc2e service nova] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Refreshing network info cache for port 7da3de27-ee87-400f-ae26-a3a6995a8363 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1120.403667] env[69328]: DEBUG oslo_vmware.api [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Task: {'id': task-3274111, 'name': ReconfigVM_Task, 'duration_secs': 0.491005} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.404025] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653934', 'volume_id': '10778dba-1e87-4365-bb28-98360345c613', 'name': 'volume-10778dba-1e87-4365-bb28-98360345c613', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732', 'attached_at': '', 'detached_at': '', 'volume_id': '10778dba-1e87-4365-bb28-98360345c613', 'serial': '10778dba-1e87-4365-bb28-98360345c613'} {{(pid=69328) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1120.404598] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b255b902-f621-48f8-8a60-47d287aa7e68 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.411837] env[69328]: DEBUG oslo_vmware.api [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Waiting for the task: (returnval){ [ 1120.411837] env[69328]: value = "task-3274113" [ 1120.411837] env[69328]: _type = "Task" [ 1120.411837] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.426906] env[69328]: DEBUG oslo_vmware.api [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274105, 'name': RelocateVM_Task} progress is 97%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.430127] env[69328]: DEBUG oslo_vmware.api [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Task: {'id': task-3274113, 'name': Rename_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.494835] env[69328]: DEBUG oslo_concurrency.lockutils [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.362s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1120.494835] env[69328]: DEBUG nova.compute.manager [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1120.497105] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.780s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1120.498726] env[69328]: INFO nova.compute.claims [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1120.935637] env[69328]: DEBUG oslo_vmware.api [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Task: {'id': task-3274113, 'name': Rename_Task, 'duration_secs': 0.343727} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.935774] env[69328]: DEBUG oslo_vmware.api [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274105, 'name': RelocateVM_Task} progress is 97%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.939866] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1120.943513] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c9940933-4263-4b74-804d-c95f63c2921c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.952978] env[69328]: DEBUG oslo_vmware.api [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Waiting for the task: (returnval){ [ 1120.952978] env[69328]: value = "task-3274114" [ 1120.952978] env[69328]: _type = "Task" [ 1120.952978] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.964985] env[69328]: DEBUG oslo_vmware.api [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Task: {'id': task-3274114, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.003614] env[69328]: DEBUG nova.compute.utils [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1121.007186] env[69328]: DEBUG nova.compute.manager [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1121.007367] env[69328]: DEBUG nova.network.neutron [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1121.121422] env[69328]: DEBUG nova.policy [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a8fbe2a134194d29af48ac8e4986d0cb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd86de4d5055642aa86a29c6768e3db46', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 1121.290958] env[69328]: DEBUG nova.network.neutron [req-a386c140-63a5-45a0-a877-9aff78bcdeba req-ab1f8db8-9e67-404d-a985-68c5bdbecc2e service nova] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Updated VIF entry in instance network info cache for port 7da3de27-ee87-400f-ae26-a3a6995a8363. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1121.292087] env[69328]: DEBUG nova.network.neutron [req-a386c140-63a5-45a0-a877-9aff78bcdeba req-ab1f8db8-9e67-404d-a985-68c5bdbecc2e service nova] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Updating instance_info_cache with network_info: [{"id": "7da3de27-ee87-400f-ae26-a3a6995a8363", "address": "fa:16:3e:91:9b:b5", "network": {"id": "bd41ac10-1850-45a2-8e46-6ebdca3d8e13", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-766393176-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.150", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efd0e2d2f9ba4416bd8fd08dad912465", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7da3de27-ee", "ovs_interfaceid": "7da3de27-ee87-400f-ae26-a3a6995a8363", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1121.425402] env[69328]: DEBUG oslo_vmware.api [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274105, 'name': RelocateVM_Task} progress is 99%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.469998] env[69328]: DEBUG oslo_vmware.api [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Task: {'id': task-3274114, 'name': PowerOnVM_Task} progress is 90%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.511768] env[69328]: DEBUG nova.compute.manager [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1121.664055] env[69328]: DEBUG nova.network.neutron [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Successfully created port: 8ca9303b-2679-4187-add6-38fd1acef103 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1121.707145] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquiring lock "ae46c18e-15ae-4a47-b05a-a143f10b5ab6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1121.707417] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lock "ae46c18e-15ae-4a47-b05a-a143f10b5ab6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1121.794397] env[69328]: DEBUG oslo_concurrency.lockutils [req-a386c140-63a5-45a0-a877-9aff78bcdeba req-ab1f8db8-9e67-404d-a985-68c5bdbecc2e service nova] Releasing lock "refresh_cache-52c87371-4142-40d6-ac68-804aabd9f823" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1121.822353] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63b16d36-fad0-4f34-8fba-ef39895acd29 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.832652] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-027a11d8-68c5-4570-86ce-1568fcab1804 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.873991] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69647b07-88cc-4697-b28f-b8638361ef34 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.883566] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83241184-0071-4e09-9471-db1f5df1cb90 {{(pid=69328) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.899204] env[69328]: DEBUG nova.compute.provider_tree [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1121.925420] env[69328]: DEBUG oslo_vmware.api [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274105, 'name': RelocateVM_Task, 'duration_secs': 4.549458} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.925733] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Volume attach. Driver type: vmdk {{(pid=69328) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1121.925941] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653933', 'volume_id': 'a2de811d-614f-4456-ac21-52535c9e5fd6', 'name': 'volume-a2de811d-614f-4456-ac21-52535c9e5fd6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ee3609ea-0855-47c2-874c-349c80419781', 'attached_at': '', 'detached_at': '', 'volume_id': 'a2de811d-614f-4456-ac21-52535c9e5fd6', 'serial': 'a2de811d-614f-4456-ac21-52535c9e5fd6'} {{(pid=69328) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1121.927147] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f52c6ab8-c208-4c62-a972-876385baa217 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.945169] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-838cae35-2fe1-4dd1-bd50-8f83ae8c4c51 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.968704] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Reconfiguring VM instance instance-0000006e to attach disk [datastore2] volume-a2de811d-614f-4456-ac21-52535c9e5fd6/volume-a2de811d-614f-4456-ac21-52535c9e5fd6.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1121.972840] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5b36b6f9-3552-43a8-a694-bce45e92efaa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.993144] env[69328]: DEBUG oslo_vmware.api [None 
req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Task: {'id': task-3274114, 'name': PowerOnVM_Task, 'duration_secs': 0.597916} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.995167] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1121.995167] env[69328]: INFO nova.compute.manager [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Took 9.74 seconds to spawn the instance on the hypervisor. [ 1121.995167] env[69328]: DEBUG nova.compute.manager [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1121.995431] env[69328]: DEBUG oslo_vmware.api [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 1121.995431] env[69328]: value = "task-3274115" [ 1121.995431] env[69328]: _type = "Task" [ 1121.995431] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.996018] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f046e412-0dd1-462b-bf0c-0ccee98922c4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.013410] env[69328]: DEBUG oslo_vmware.api [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274115, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.210415] env[69328]: DEBUG nova.compute.manager [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1122.246239] env[69328]: DEBUG nova.compute.manager [req-9461a6c2-06b0-4d46-becd-95c2df074202 req-8176bd15-e357-4de4-9116-a0f517830b53 service nova] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Received event network-changed-7da3de27-ee87-400f-ae26-a3a6995a8363 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1122.246360] env[69328]: DEBUG nova.compute.manager [req-9461a6c2-06b0-4d46-becd-95c2df074202 req-8176bd15-e357-4de4-9116-a0f517830b53 service nova] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Refreshing instance network info cache due to event network-changed-7da3de27-ee87-400f-ae26-a3a6995a8363. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1122.247138] env[69328]: DEBUG oslo_concurrency.lockutils [req-9461a6c2-06b0-4d46-becd-95c2df074202 req-8176bd15-e357-4de4-9116-a0f517830b53 service nova] Acquiring lock "refresh_cache-52c87371-4142-40d6-ac68-804aabd9f823" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1122.247138] env[69328]: DEBUG oslo_concurrency.lockutils [req-9461a6c2-06b0-4d46-becd-95c2df074202 req-8176bd15-e357-4de4-9116-a0f517830b53 service nova] Acquired lock "refresh_cache-52c87371-4142-40d6-ac68-804aabd9f823" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1122.247138] env[69328]: DEBUG nova.network.neutron [req-9461a6c2-06b0-4d46-becd-95c2df074202 req-8176bd15-e357-4de4-9116-a0f517830b53 service nova] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Refreshing network info cache for port 7da3de27-ee87-400f-ae26-a3a6995a8363 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1122.295871] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-265b401f-559d-40e3-bb74-e1b4d81ad73a tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Volume attach. Driver type: vmdk {{(pid=69328) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1122.296146] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-265b401f-559d-40e3-bb74-e1b4d81ad73a tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653948', 'volume_id': 'fef2b051-1a59-44f4-891c-513cd1a55f11', 'name': 'volume-fef2b051-1a59-44f4-891c-513cd1a55f11', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '275ef1ed-8e60-4151-b548-e22e5bd8efe2', 'attached_at': '', 'detached_at': '', 'volume_id': 'fef2b051-1a59-44f4-891c-513cd1a55f11', 'serial': 'fef2b051-1a59-44f4-891c-513cd1a55f11'} {{(pid=69328) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1122.297046] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d6f9123-26d7-419c-afda-7b9271565a47 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.316626] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4c2a363-25ee-4a70-97fd-e5919f32f487 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.345144] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-265b401f-559d-40e3-bb74-e1b4d81ad73a tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] volume-fef2b051-1a59-44f4-891c-513cd1a55f11/volume-fef2b051-1a59-44f4-891c-513cd1a55f11.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1122.345480] env[69328]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1fea5b71-8aa7-47cb-aae8-3beae20ebbe5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.366620] env[69328]: DEBUG oslo_vmware.api [None req-265b401f-559d-40e3-bb74-e1b4d81ad73a tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Waiting for the task: (returnval){ [ 1122.366620] env[69328]: value = "task-3274116" [ 1122.366620] env[69328]: _type = "Task" [ 1122.366620] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.375475] env[69328]: DEBUG oslo_vmware.api [None req-265b401f-559d-40e3-bb74-e1b4d81ad73a tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3274116, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.402589] env[69328]: DEBUG nova.scheduler.client.report [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1122.510954] env[69328]: DEBUG oslo_vmware.api [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274115, 'name': ReconfigVM_Task, 'duration_secs': 0.321014} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.512279] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Reconfigured VM instance instance-0000006e to attach disk [datastore2] volume-a2de811d-614f-4456-ac21-52535c9e5fd6/volume-a2de811d-614f-4456-ac21-52535c9e5fd6.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1122.521848] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b178da7-8ed0-4def-9743-e517aaec4a12 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.543668] env[69328]: DEBUG nova.compute.manager [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1122.547014] env[69328]: INFO nova.compute.manager [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Took 17.04 seconds to build instance. [ 1122.554769] env[69328]: DEBUG oslo_vmware.api [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 1122.554769] env[69328]: value = "task-3274117" [ 1122.554769] env[69328]: _type = "Task" [ 1122.554769] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.565303] env[69328]: DEBUG oslo_vmware.api [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274117, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.576180] env[69328]: DEBUG nova.virt.hardware [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1122.576506] env[69328]: DEBUG nova.virt.hardware [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1122.576695] env[69328]: DEBUG nova.virt.hardware [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1122.576880] env[69328]: DEBUG nova.virt.hardware [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1122.577048] env[69328]: DEBUG nova.virt.hardware [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1122.577201] env[69328]: DEBUG nova.virt.hardware [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 
tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1122.578848] env[69328]: DEBUG nova.virt.hardware [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1122.578848] env[69328]: DEBUG nova.virt.hardware [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1122.578848] env[69328]: DEBUG nova.virt.hardware [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1122.578848] env[69328]: DEBUG nova.virt.hardware [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1122.578848] env[69328]: DEBUG nova.virt.hardware [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1122.579319] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d35ae97b-b90a-4324-993d-df2fe47bb8e4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.589099] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0c377bc-d178-454d-a063-ff150b467ef0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.737988] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1122.878754] env[69328]: DEBUG oslo_vmware.api [None req-265b401f-559d-40e3-bb74-e1b4d81ad73a tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3274116, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.907874] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.411s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1122.908416] env[69328]: DEBUG nova.compute.manager [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1122.911610] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f35c241b-8b70-43a0-91e0-a6dde189ad31 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.908s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1122.915510] env[69328]: DEBUG nova.objects.instance [None req-f35c241b-8b70-43a0-91e0-a6dde189ad31 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lazy-loading 'resources' on Instance uuid 071c1837-9d0b-4b69-b16e-991b300385fb {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1123.049344] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8c1fcb8a-8dec-4c15-b67e-884ff1797da7 tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Lock "5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.550s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1123.066195] env[69328]: DEBUG oslo_vmware.api [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274117, 'name': ReconfigVM_Task, 'duration_secs': 0.145079} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.066510] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653933', 'volume_id': 'a2de811d-614f-4456-ac21-52535c9e5fd6', 'name': 'volume-a2de811d-614f-4456-ac21-52535c9e5fd6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ee3609ea-0855-47c2-874c-349c80419781', 'attached_at': '', 'detached_at': '', 'volume_id': 'a2de811d-614f-4456-ac21-52535c9e5fd6', 'serial': 'a2de811d-614f-4456-ac21-52535c9e5fd6'} {{(pid=69328) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1123.067420] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4d0f64fe-d7dc-49df-a510-3a2dbf56f77e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.075896] env[69328]: DEBUG oslo_vmware.api [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 1123.075896] env[69328]: value = "task-3274118" [ 1123.075896] env[69328]: _type = "Task" [ 1123.075896] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.087699] env[69328]: DEBUG oslo_vmware.api [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274118, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.377060] env[69328]: DEBUG oslo_vmware.api [None req-265b401f-559d-40e3-bb74-e1b4d81ad73a tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3274116, 'name': ReconfigVM_Task, 'duration_secs': 0.780822} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.377595] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-265b401f-559d-40e3-bb74-e1b4d81ad73a tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Reconfigured VM instance instance-00000067 to attach disk [datastore1] volume-fef2b051-1a59-44f4-891c-513cd1a55f11/volume-fef2b051-1a59-44f4-891c-513cd1a55f11.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1123.382433] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bf5476f6-b979-4a8a-88c3-a03c56842584 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.399376] env[69328]: DEBUG oslo_vmware.api [None req-265b401f-559d-40e3-bb74-e1b4d81ad73a tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Waiting for the task: (returnval){ [ 1123.399376] env[69328]: value = "task-3274119" [ 1123.399376] env[69328]: _type = "Task" [ 1123.399376] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.407324] env[69328]: DEBUG oslo_vmware.api [None req-265b401f-559d-40e3-bb74-e1b4d81ad73a tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3274119, 'name': ReconfigVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.419019] env[69328]: DEBUG nova.compute.utils [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1123.420608] env[69328]: DEBUG nova.compute.manager [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1123.420872] env[69328]: DEBUG nova.network.neutron [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1123.486744] env[69328]: DEBUG nova.network.neutron [req-9461a6c2-06b0-4d46-becd-95c2df074202 req-8176bd15-e357-4de4-9116-a0f517830b53 service nova] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Updated VIF entry in instance network info cache for port 7da3de27-ee87-400f-ae26-a3a6995a8363. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1123.487118] env[69328]: DEBUG nova.network.neutron [req-9461a6c2-06b0-4d46-becd-95c2df074202 req-8176bd15-e357-4de4-9116-a0f517830b53 service nova] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Updating instance_info_cache with network_info: [{"id": "7da3de27-ee87-400f-ae26-a3a6995a8363", "address": "fa:16:3e:91:9b:b5", "network": {"id": "bd41ac10-1850-45a2-8e46-6ebdca3d8e13", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-766393176-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.150", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efd0e2d2f9ba4416bd8fd08dad912465", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7da3de27-ee", "ovs_interfaceid": "7da3de27-ee87-400f-ae26-a3a6995a8363", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1123.502898] env[69328]: DEBUG nova.policy [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '69ca01fd1d0f42b0b05a5426da9753ad', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '30209bc93a4042488f15c73b7e4733d5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 1123.591768] env[69328]: DEBUG oslo_vmware.api [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274118, 'name': Rename_Task, 'duration_secs': 0.140685} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.595102] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1123.595425] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6d9afdc3-d957-4e9f-b6e4-f4622633a195 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.611339] env[69328]: DEBUG oslo_vmware.api [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 1123.611339] env[69328]: value = "task-3274120" [ 1123.611339] env[69328]: _type = "Task" [ 1123.611339] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.625655] env[69328]: DEBUG oslo_vmware.api [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274120, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.695637] env[69328]: DEBUG nova.compute.manager [req-dab1aa23-7ebc-47de-814c-402e8d58a225 req-d9cfea15-f190-4892-aee1-edfa231758e6 service nova] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Received event network-changed-f52daaa5-48f6-4553-ac25-4a0103a7736f {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1123.695872] env[69328]: DEBUG nova.compute.manager [req-dab1aa23-7ebc-47de-814c-402e8d58a225 req-d9cfea15-f190-4892-aee1-edfa231758e6 service nova] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Refreshing instance network info cache due to event network-changed-f52daaa5-48f6-4553-ac25-4a0103a7736f. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1123.698557] env[69328]: DEBUG oslo_concurrency.lockutils [req-dab1aa23-7ebc-47de-814c-402e8d58a225 req-d9cfea15-f190-4892-aee1-edfa231758e6 service nova] Acquiring lock "refresh_cache-5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1123.698756] env[69328]: DEBUG oslo_concurrency.lockutils [req-dab1aa23-7ebc-47de-814c-402e8d58a225 req-d9cfea15-f190-4892-aee1-edfa231758e6 service nova] Acquired lock "refresh_cache-5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1123.698964] env[69328]: DEBUG nova.network.neutron [req-dab1aa23-7ebc-47de-814c-402e8d58a225 req-d9cfea15-f190-4892-aee1-edfa231758e6 service nova] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Refreshing network info cache for port f52daaa5-48f6-4553-ac25-4a0103a7736f {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1123.783030] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d762a897-4a57-4c70-9f75-d6d100c72814 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.797239] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-849890aa-1049-4eea-8296-b3fd93534e21 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.831495] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd7676ff-b30b-4014-84bd-eb73a2d6471e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.845554] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01558051-d50a-48f9-8c4b-2e7de9fd0aa3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.864954] env[69328]: DEBUG nova.compute.provider_tree [None req-f35c241b-8b70-43a0-91e0-a6dde189ad31 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1123.899787] env[69328]: DEBUG nova.network.neutron [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Successfully updated port: 8ca9303b-2679-4187-add6-38fd1acef103 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1123.912249] env[69328]: DEBUG oslo_vmware.api [None req-265b401f-559d-40e3-bb74-e1b4d81ad73a tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3274119, 'name': ReconfigVM_Task, 'duration_secs': 0.234998} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.912595] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-265b401f-559d-40e3-bb74-e1b4d81ad73a tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653948', 'volume_id': 'fef2b051-1a59-44f4-891c-513cd1a55f11', 'name': 'volume-fef2b051-1a59-44f4-891c-513cd1a55f11', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '275ef1ed-8e60-4151-b548-e22e5bd8efe2', 'attached_at': '', 'detached_at': '', 'volume_id': 'fef2b051-1a59-44f4-891c-513cd1a55f11', 'serial': 'fef2b051-1a59-44f4-891c-513cd1a55f11'} {{(pid=69328) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1123.931399] env[69328]: DEBUG nova.compute.manager [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1123.986857] env[69328]: DEBUG nova.network.neutron [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Successfully created port: 509b2377-84e7-48a6-b2ed-811f288cc65c {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1123.991835] env[69328]: DEBUG oslo_concurrency.lockutils [req-9461a6c2-06b0-4d46-becd-95c2df074202 req-8176bd15-e357-4de4-9116-a0f517830b53 service nova] Releasing lock "refresh_cache-52c87371-4142-40d6-ac68-804aabd9f823" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1124.123883] env[69328]: DEBUG oslo_vmware.api [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274120, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.368918] env[69328]: DEBUG nova.scheduler.client.report [None req-f35c241b-8b70-43a0-91e0-a6dde189ad31 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1124.408254] env[69328]: DEBUG oslo_concurrency.lockutils [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "refresh_cache-32b9acbc-35a0-4d67-ac74-ef46c45fa0b9" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1124.408381] env[69328]: DEBUG oslo_concurrency.lockutils [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquired lock "refresh_cache-32b9acbc-35a0-4d67-ac74-ef46c45fa0b9" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1124.408550] env[69328]: DEBUG nova.network.neutron [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1124.461386] env[69328]: DEBUG nova.network.neutron [req-dab1aa23-7ebc-47de-814c-402e8d58a225 req-d9cfea15-f190-4892-aee1-edfa231758e6 service nova] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Updated VIF entry in instance network info cache for port f52daaa5-48f6-4553-ac25-4a0103a7736f. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1124.461744] env[69328]: DEBUG nova.network.neutron [req-dab1aa23-7ebc-47de-814c-402e8d58a225 req-d9cfea15-f190-4892-aee1-edfa231758e6 service nova] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Updating instance_info_cache with network_info: [{"id": "f52daaa5-48f6-4553-ac25-4a0103a7736f", "address": "fa:16:3e:a0:32:13", "network": {"id": "23570c12-86fb-49f7-8b67-c216378fd5e7", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-769391723-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e32664bc571e4ebdabcb1b4956a677fc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b107fab-ee71-47db-ad4d-3c6f05546843", "external-id": "cl2-zone-554", "segmentation_id": 554, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf52daaa5-48", "ovs_interfaceid": "f52daaa5-48f6-4553-ac25-4a0103a7736f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1124.468989] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Acquiring lock "79d66d5d-e1a4-4bc0-8e43-db97153867e3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1124.469270] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Lock "79d66d5d-e1a4-4bc0-8e43-db97153867e3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1124.625892] env[69328]: DEBUG oslo_vmware.api [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274120, 'name': PowerOnVM_Task, 'duration_secs': 0.527224} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.626331] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1124.626590] env[69328]: INFO nova.compute.manager [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Took 9.99 seconds to spawn the instance on the hypervisor. [ 1124.626815] env[69328]: DEBUG nova.compute.manager [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1124.627697] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-385ed7c2-f21b-408a-9faf-4c03557a101d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.874196] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f35c241b-8b70-43a0-91e0-a6dde189ad31 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.962s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1124.876665] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.139s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1124.878193] env[69328]: INFO nova.compute.claims [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1124.897903] env[69328]: INFO nova.scheduler.client.report [None req-f35c241b-8b70-43a0-91e0-a6dde189ad31 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Deleted allocations for instance 071c1837-9d0b-4b69-b16e-991b300385fb [ 1124.941044] env[69328]: DEBUG nova.compute.manager [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1124.950471] env[69328]: DEBUG nova.network.neutron [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1124.964524] env[69328]: DEBUG nova.objects.instance [None req-265b401f-559d-40e3-bb74-e1b4d81ad73a tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Lazy-loading 'flavor' on Instance uuid 275ef1ed-8e60-4151-b548-e22e5bd8efe2 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1124.971481] env[69328]: DEBUG oslo_concurrency.lockutils [req-dab1aa23-7ebc-47de-814c-402e8d58a225 req-d9cfea15-f190-4892-aee1-edfa231758e6 service nova] Releasing lock "refresh_cache-5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1124.972020] env[69328]: DEBUG nova.compute.manager [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1124.976389] env[69328]: DEBUG nova.virt.hardware [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1124.976611] env[69328]: DEBUG nova.virt.hardware [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1124.976768] env[69328]: DEBUG nova.virt.hardware [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1124.976948] env[69328]: DEBUG nova.virt.hardware [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1124.977124] env[69328]: DEBUG nova.virt.hardware [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1124.977277] env[69328]: DEBUG nova.virt.hardware [None req-e1aaf386-3d52-4570-8319-d673b85301a5 
tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1124.977485] env[69328]: DEBUG nova.virt.hardware [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1124.977643] env[69328]: DEBUG nova.virt.hardware [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1124.977807] env[69328]: DEBUG nova.virt.hardware [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1124.977966] env[69328]: DEBUG nova.virt.hardware [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1124.978263] env[69328]: DEBUG nova.virt.hardware [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1124.979419] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-079d17bb-a3d7-485d-91e9-ab3df7e18a5f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.989108] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb8b5048-0ae8-4892-8314-3b6dd4ac6d36 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.108782] env[69328]: DEBUG nova.network.neutron [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Updating instance_info_cache with network_info: [{"id": "8ca9303b-2679-4187-add6-38fd1acef103", "address": "fa:16:3e:79:38:b9", "network": {"id": "7f5dcab4-3cec-42a1-b589-88cb373af645", "bridge": "br-int", "label": "tempest-ServersTestJSON-1833802368-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d86de4d5055642aa86a29c6768e3db46", "mtu": 8950, "physical_network": 
"default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b94712a6-b777-47dd-bc06-f9acfce2d936", "external-id": "nsx-vlan-transportzone-494", "segmentation_id": 494, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ca9303b-26", "ovs_interfaceid": "8ca9303b-2679-4187-add6-38fd1acef103", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1125.149979] env[69328]: INFO nova.compute.manager [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Took 18.72 seconds to build instance. [ 1125.332390] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7ea9b2a3-83fb-43e1-b739-a7cde265bc49 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Acquiring lock "275ef1ed-8e60-4151-b548-e22e5bd8efe2" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1125.375338] env[69328]: DEBUG nova.compute.manager [req-a76231ca-2108-4431-af87-cdff39489887 req-ccd52841-f3a0-476e-b274-a83e2a4fa529 service nova] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Received event network-vif-plugged-509b2377-84e7-48a6-b2ed-811f288cc65c {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1125.375549] env[69328]: DEBUG oslo_concurrency.lockutils [req-a76231ca-2108-4431-af87-cdff39489887 req-ccd52841-f3a0-476e-b274-a83e2a4fa529 service nova] Acquiring lock "33583ef3-252c-45d4-a514-5646f98c5f45-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1125.375756] env[69328]: DEBUG oslo_concurrency.lockutils [req-a76231ca-2108-4431-af87-cdff39489887 req-ccd52841-f3a0-476e-b274-a83e2a4fa529 service nova] Lock "33583ef3-252c-45d4-a514-5646f98c5f45-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1125.375920] env[69328]: DEBUG oslo_concurrency.lockutils [req-a76231ca-2108-4431-af87-cdff39489887 req-ccd52841-f3a0-476e-b274-a83e2a4fa529 service nova] Lock "33583ef3-252c-45d4-a514-5646f98c5f45-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1125.376319] env[69328]: DEBUG nova.compute.manager [req-a76231ca-2108-4431-af87-cdff39489887 req-ccd52841-f3a0-476e-b274-a83e2a4fa529 service nova] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] No waiting events found dispatching network-vif-plugged-509b2377-84e7-48a6-b2ed-811f288cc65c {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1125.376518] env[69328]: WARNING nova.compute.manager [req-a76231ca-2108-4431-af87-cdff39489887 req-ccd52841-f3a0-476e-b274-a83e2a4fa529 service nova] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Received unexpected event 
network-vif-plugged-509b2377-84e7-48a6-b2ed-811f288cc65c for instance with vm_state building and task_state spawning. [ 1125.406592] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f35c241b-8b70-43a0-91e0-a6dde189ad31 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "071c1837-9d0b-4b69-b16e-991b300385fb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.408s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1125.469444] env[69328]: DEBUG oslo_concurrency.lockutils [None req-265b401f-559d-40e3-bb74-e1b4d81ad73a tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Lock "275ef1ed-8e60-4151-b548-e22e5bd8efe2" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.809s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1125.470796] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7ea9b2a3-83fb-43e1-b739-a7cde265bc49 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Lock "275ef1ed-8e60-4151-b548-e22e5bd8efe2" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.139s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1125.486205] env[69328]: DEBUG nova.network.neutron [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Successfully updated port: 509b2377-84e7-48a6-b2ed-811f288cc65c {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1125.508997] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1125.612197] env[69328]: DEBUG oslo_concurrency.lockutils [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Releasing lock "refresh_cache-32b9acbc-35a0-4d67-ac74-ef46c45fa0b9" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1125.612552] env[69328]: DEBUG nova.compute.manager [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Instance network_info: |[{"id": "8ca9303b-2679-4187-add6-38fd1acef103", "address": "fa:16:3e:79:38:b9", "network": {"id": "7f5dcab4-3cec-42a1-b589-88cb373af645", "bridge": "br-int", "label": "tempest-ServersTestJSON-1833802368-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d86de4d5055642aa86a29c6768e3db46", 
"mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b94712a6-b777-47dd-bc06-f9acfce2d936", "external-id": "nsx-vlan-transportzone-494", "segmentation_id": 494, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ca9303b-26", "ovs_interfaceid": "8ca9303b-2679-4187-add6-38fd1acef103", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1125.612991] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:79:38:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b94712a6-b777-47dd-bc06-f9acfce2d936', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8ca9303b-2679-4187-add6-38fd1acef103', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1125.620664] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1125.620886] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1125.621122] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0047ab14-70d4-4137-b123-f3cfa876b94b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.641734] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1125.641734] env[69328]: value = "task-3274121" [ 1125.641734] env[69328]: _type = "Task" [ 1125.641734] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.649592] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274121, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.652161] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d8ebc6e8-bb6a-40b2-a590-49a69b69c221 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "ee3609ea-0855-47c2-874c-349c80419781" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.240s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1125.732074] env[69328]: DEBUG nova.compute.manager [req-4d7bc0d7-83fc-47a5-be7f-8286543d6bf5 req-5478ce3f-fd9a-4a5c-aa4a-bc96e03f17ac service nova] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Received event network-vif-plugged-8ca9303b-2679-4187-add6-38fd1acef103 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1125.732074] env[69328]: DEBUG oslo_concurrency.lockutils [req-4d7bc0d7-83fc-47a5-be7f-8286543d6bf5 req-5478ce3f-fd9a-4a5c-aa4a-bc96e03f17ac service nova] Acquiring lock "32b9acbc-35a0-4d67-ac74-ef46c45fa0b9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1125.733462] env[69328]: DEBUG oslo_concurrency.lockutils [req-4d7bc0d7-83fc-47a5-be7f-8286543d6bf5 req-5478ce3f-fd9a-4a5c-aa4a-bc96e03f17ac service nova] Lock "32b9acbc-35a0-4d67-ac74-ef46c45fa0b9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1125.733792] env[69328]: DEBUG oslo_concurrency.lockutils [req-4d7bc0d7-83fc-47a5-be7f-8286543d6bf5 req-5478ce3f-fd9a-4a5c-aa4a-bc96e03f17ac service nova] Lock "32b9acbc-35a0-4d67-ac74-ef46c45fa0b9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.002s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1125.734175] env[69328]: DEBUG nova.compute.manager [req-4d7bc0d7-83fc-47a5-be7f-8286543d6bf5 req-5478ce3f-fd9a-4a5c-aa4a-bc96e03f17ac service nova] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] No waiting events found dispatching network-vif-plugged-8ca9303b-2679-4187-add6-38fd1acef103 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1125.734420] env[69328]: WARNING nova.compute.manager [req-4d7bc0d7-83fc-47a5-be7f-8286543d6bf5 req-5478ce3f-fd9a-4a5c-aa4a-bc96e03f17ac service nova] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Received unexpected event network-vif-plugged-8ca9303b-2679-4187-add6-38fd1acef103 for instance with vm_state building and task_state spawning. [ 1125.734716] env[69328]: DEBUG nova.compute.manager [req-4d7bc0d7-83fc-47a5-be7f-8286543d6bf5 req-5478ce3f-fd9a-4a5c-aa4a-bc96e03f17ac service nova] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Received event network-changed-8ca9303b-2679-4187-add6-38fd1acef103 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1125.734984] env[69328]: DEBUG nova.compute.manager [req-4d7bc0d7-83fc-47a5-be7f-8286543d6bf5 req-5478ce3f-fd9a-4a5c-aa4a-bc96e03f17ac service nova] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Refreshing instance network info cache due to event network-changed-8ca9303b-2679-4187-add6-38fd1acef103. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1125.735347] env[69328]: DEBUG oslo_concurrency.lockutils [req-4d7bc0d7-83fc-47a5-be7f-8286543d6bf5 req-5478ce3f-fd9a-4a5c-aa4a-bc96e03f17ac service nova] Acquiring lock "refresh_cache-32b9acbc-35a0-4d67-ac74-ef46c45fa0b9" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1125.735560] env[69328]: DEBUG oslo_concurrency.lockutils [req-4d7bc0d7-83fc-47a5-be7f-8286543d6bf5 req-5478ce3f-fd9a-4a5c-aa4a-bc96e03f17ac service nova] Acquired lock "refresh_cache-32b9acbc-35a0-4d67-ac74-ef46c45fa0b9" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1125.735809] env[69328]: DEBUG nova.network.neutron [req-4d7bc0d7-83fc-47a5-be7f-8286543d6bf5 req-5478ce3f-fd9a-4a5c-aa4a-bc96e03f17ac service nova] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Refreshing network info cache for port 8ca9303b-2679-4187-add6-38fd1acef103 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1125.978305] env[69328]: INFO nova.compute.manager [None req-7ea9b2a3-83fb-43e1-b739-a7cde265bc49 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Detaching volume fef2b051-1a59-44f4-891c-513cd1a55f11 [ 1125.990311] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "refresh_cache-33583ef3-252c-45d4-a514-5646f98c5f45" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1125.991025] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquired lock "refresh_cache-33583ef3-252c-45d4-a514-5646f98c5f45" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1125.991025] env[69328]: DEBUG nova.network.neutron [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1126.019947] env[69328]: INFO nova.virt.block_device [None req-7ea9b2a3-83fb-43e1-b739-a7cde265bc49 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Attempting to driver detach volume fef2b051-1a59-44f4-891c-513cd1a55f11 from mountpoint /dev/sdb [ 1126.020118] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ea9b2a3-83fb-43e1-b739-a7cde265bc49 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Volume detach. 
Driver type: vmdk {{(pid=69328) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1126.020443] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ea9b2a3-83fb-43e1-b739-a7cde265bc49 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653948', 'volume_id': 'fef2b051-1a59-44f4-891c-513cd1a55f11', 'name': 'volume-fef2b051-1a59-44f4-891c-513cd1a55f11', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '275ef1ed-8e60-4151-b548-e22e5bd8efe2', 'attached_at': '', 'detached_at': '', 'volume_id': 'fef2b051-1a59-44f4-891c-513cd1a55f11', 'serial': 'fef2b051-1a59-44f4-891c-513cd1a55f11'} {{(pid=69328) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1126.021453] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aab18bc-766a-43cc-83a9-23a533470b75 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.053583] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-790cbfc8-2359-4842-8b2f-6b6128ef44bf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.061553] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dddc9db-e931-423f-a1a4-157c9ab14102 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.085707] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf9bcaab-9b4a-4252-86e7-0487803d2900 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.104021] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ea9b2a3-83fb-43e1-b739-a7cde265bc49 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] The volume has not been displaced from its original location: [datastore1] volume-fef2b051-1a59-44f4-891c-513cd1a55f11/volume-fef2b051-1a59-44f4-891c-513cd1a55f11.vmdk. No consolidation needed. 
{{(pid=69328) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1126.108283] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ea9b2a3-83fb-43e1-b739-a7cde265bc49 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Reconfiguring VM instance instance-00000067 to detach disk 2001 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1126.112503] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-61b8e967-ca41-4b07-b709-781ca0067679 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.130540] env[69328]: DEBUG oslo_vmware.api [None req-7ea9b2a3-83fb-43e1-b739-a7cde265bc49 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Waiting for the task: (returnval){ [ 1126.130540] env[69328]: value = "task-3274122" [ 1126.130540] env[69328]: _type = "Task" [ 1126.130540] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.143692] env[69328]: DEBUG oslo_vmware.api [None req-7ea9b2a3-83fb-43e1-b739-a7cde265bc49 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3274122, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.152533] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274121, 'name': CreateVM_Task, 'duration_secs': 0.358788} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.155423] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1126.156478] env[69328]: DEBUG oslo_concurrency.lockutils [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1126.157380] env[69328]: DEBUG oslo_concurrency.lockutils [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1126.157380] env[69328]: DEBUG oslo_concurrency.lockutils [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1126.157380] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08bff8c5-5ba7-4f81-83c4-c18b988cdd46 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1126.162849] env[69328]: DEBUG oslo_vmware.api [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1126.162849] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]527f1884-39a8-18d4-3a6f-746461422de3" [ 1126.162849] env[69328]: _type = "Task" [ 1126.162849] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.171513] env[69328]: DEBUG oslo_vmware.api [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]527f1884-39a8-18d4-3a6f-746461422de3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.218324] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bda94884-7ade-4e84-a546-592d640acfbe {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.227860] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ef2fbbf-7d35-4461-8e46-125b88484b6b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.265191] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25403b4c-0046-4dc3-b136-3083c164b015 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.273724] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0799883d-c652-4637-ae19-cc24c4774d1e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.289060] env[69328]: DEBUG nova.compute.provider_tree [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1126.535143] env[69328]: DEBUG nova.network.neutron [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1126.572891] env[69328]: DEBUG nova.network.neutron [req-4d7bc0d7-83fc-47a5-be7f-8286543d6bf5 req-5478ce3f-fd9a-4a5c-aa4a-bc96e03f17ac service nova] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Updated VIF entry in instance network info cache for port 8ca9303b-2679-4187-add6-38fd1acef103. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1126.573603] env[69328]: DEBUG nova.network.neutron [req-4d7bc0d7-83fc-47a5-be7f-8286543d6bf5 req-5478ce3f-fd9a-4a5c-aa4a-bc96e03f17ac service nova] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Updating instance_info_cache with network_info: [{"id": "8ca9303b-2679-4187-add6-38fd1acef103", "address": "fa:16:3e:79:38:b9", "network": {"id": "7f5dcab4-3cec-42a1-b589-88cb373af645", "bridge": "br-int", "label": "tempest-ServersTestJSON-1833802368-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d86de4d5055642aa86a29c6768e3db46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b94712a6-b777-47dd-bc06-f9acfce2d936", "external-id": "nsx-vlan-transportzone-494", "segmentation_id": 494, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ca9303b-26", "ovs_interfaceid": "8ca9303b-2679-4187-add6-38fd1acef103", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1126.640672] env[69328]: DEBUG oslo_vmware.api [None req-7ea9b2a3-83fb-43e1-b739-a7cde265bc49 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3274122, 'name': ReconfigVM_Task, 'duration_secs': 0.268048} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.641615] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ea9b2a3-83fb-43e1-b739-a7cde265bc49 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Reconfigured VM instance instance-00000067 to detach disk 2001 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1126.650019] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d7bcc8c8-e016-40b8-a24e-b1886bfb40e4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.664801] env[69328]: DEBUG oslo_vmware.api [None req-7ea9b2a3-83fb-43e1-b739-a7cde265bc49 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Waiting for the task: (returnval){ [ 1126.664801] env[69328]: value = "task-3274123" [ 1126.664801] env[69328]: _type = "Task" [ 1126.664801] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.677368] env[69328]: DEBUG oslo_vmware.api [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]527f1884-39a8-18d4-3a6f-746461422de3, 'name': SearchDatastore_Task, 'duration_secs': 0.01306} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.683037] env[69328]: DEBUG oslo_concurrency.lockutils [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1126.683037] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1126.683037] env[69328]: DEBUG oslo_concurrency.lockutils [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1126.683037] env[69328]: DEBUG oslo_concurrency.lockutils [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1126.683037] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1126.683037] env[69328]: DEBUG oslo_vmware.api [None req-7ea9b2a3-83fb-43e1-b739-a7cde265bc49 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3274123, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.683037] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d2c8400d-9c62-4885-9c19-559aeefdef53 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.695172] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1126.695172] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1126.696122] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2feeef5b-bf48-4e8c-8c92-609defc4222a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.703961] env[69328]: DEBUG oslo_vmware.api [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1126.703961] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52165ff9-3f43-c853-84a4-2c54bf1094c5" [ 1126.703961] env[69328]: _type = "Task" [ 1126.703961] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.713982] env[69328]: DEBUG oslo_vmware.api [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52165ff9-3f43-c853-84a4-2c54bf1094c5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.772659] env[69328]: DEBUG nova.network.neutron [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Updating instance_info_cache with network_info: [{"id": "509b2377-84e7-48a6-b2ed-811f288cc65c", "address": "fa:16:3e:f8:6d:5c", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap509b2377-84", "ovs_interfaceid": "509b2377-84e7-48a6-b2ed-811f288cc65c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1126.793037] env[69328]: DEBUG nova.scheduler.client.report [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 
1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1127.076679] env[69328]: DEBUG oslo_concurrency.lockutils [req-4d7bc0d7-83fc-47a5-be7f-8286543d6bf5 req-5478ce3f-fd9a-4a5c-aa4a-bc96e03f17ac service nova] Releasing lock "refresh_cache-32b9acbc-35a0-4d67-ac74-ef46c45fa0b9" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1127.183057] env[69328]: DEBUG oslo_vmware.api [None req-7ea9b2a3-83fb-43e1-b739-a7cde265bc49 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3274123, 'name': ReconfigVM_Task, 'duration_secs': 0.171244} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.183057] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ea9b2a3-83fb-43e1-b739-a7cde265bc49 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653948', 'volume_id': 'fef2b051-1a59-44f4-891c-513cd1a55f11', 'name': 'volume-fef2b051-1a59-44f4-891c-513cd1a55f11', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '275ef1ed-8e60-4151-b548-e22e5bd8efe2', 'attached_at': '', 'detached_at': '', 'volume_id': 'fef2b051-1a59-44f4-891c-513cd1a55f11', 'serial': 'fef2b051-1a59-44f4-891c-513cd1a55f11'} {{(pid=69328) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1127.214937] env[69328]: DEBUG oslo_vmware.api [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52165ff9-3f43-c853-84a4-2c54bf1094c5, 'name': SearchDatastore_Task, 'duration_secs': 0.01202} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.215934] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9627cff-1cc7-4e32-a1d5-28e44fbefa38 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.225026] env[69328]: DEBUG oslo_vmware.api [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1127.225026] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f7cc13-6754-ebc4-1024-22787d82967d" [ 1127.225026] env[69328]: _type = "Task" [ 1127.225026] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.231907] env[69328]: DEBUG oslo_vmware.api [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f7cc13-6754-ebc4-1024-22787d82967d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.278530] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Releasing lock "refresh_cache-33583ef3-252c-45d4-a514-5646f98c5f45" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1127.278833] env[69328]: DEBUG nova.compute.manager [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Instance network_info: |[{"id": "509b2377-84e7-48a6-b2ed-811f288cc65c", "address": "fa:16:3e:f8:6d:5c", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap509b2377-84", "ovs_interfaceid": "509b2377-84e7-48a6-b2ed-811f288cc65c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1127.279322] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f8:6d:5c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'de7fa486-5f28-44ae-b0cf-72234ff87546', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '509b2377-84e7-48a6-b2ed-811f288cc65c', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1127.286942] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1127.288181] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1127.288181] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f7455515-effe-4f50-9965-f6c57f14b5d0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.303016] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.426s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1127.303820] env[69328]: DEBUG nova.compute.manager [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1127.307741] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.799s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1127.309417] env[69328]: INFO nova.compute.claims [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1127.318516] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1127.318516] env[69328]: value = "task-3274124" [ 1127.318516] env[69328]: _type = "Task" [ 1127.318516] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.329086] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274124, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.420042] env[69328]: DEBUG nova.compute.manager [req-80536940-076d-4fea-a3e8-ae936cd1a6e3 req-c10c4640-6dc5-4aa7-ac9c-7d5d613379d1 service nova] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Received event network-changed-509b2377-84e7-48a6-b2ed-811f288cc65c {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1127.420306] env[69328]: DEBUG nova.compute.manager [req-80536940-076d-4fea-a3e8-ae936cd1a6e3 req-c10c4640-6dc5-4aa7-ac9c-7d5d613379d1 service nova] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Refreshing instance network info cache due to event network-changed-509b2377-84e7-48a6-b2ed-811f288cc65c. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1127.420853] env[69328]: DEBUG oslo_concurrency.lockutils [req-80536940-076d-4fea-a3e8-ae936cd1a6e3 req-c10c4640-6dc5-4aa7-ac9c-7d5d613379d1 service nova] Acquiring lock "refresh_cache-33583ef3-252c-45d4-a514-5646f98c5f45" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1127.420853] env[69328]: DEBUG oslo_concurrency.lockutils [req-80536940-076d-4fea-a3e8-ae936cd1a6e3 req-c10c4640-6dc5-4aa7-ac9c-7d5d613379d1 service nova] Acquired lock "refresh_cache-33583ef3-252c-45d4-a514-5646f98c5f45" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1127.421177] env[69328]: DEBUG nova.network.neutron [req-80536940-076d-4fea-a3e8-ae936cd1a6e3 req-c10c4640-6dc5-4aa7-ac9c-7d5d613379d1 service nova] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Refreshing network info cache for port 509b2377-84e7-48a6-b2ed-811f288cc65c {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1127.734500] env[69328]: DEBUG oslo_vmware.api [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f7cc13-6754-ebc4-1024-22787d82967d, 'name': SearchDatastore_Task, 'duration_secs': 0.029483} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.734971] env[69328]: DEBUG oslo_concurrency.lockutils [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1127.734971] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9/32b9acbc-35a0-4d67-ac74-ef46c45fa0b9.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1127.739538] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-211fb5f0-3639-43aa-a36a-32f12349ee24 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.739880] env[69328]: DEBUG nova.objects.instance [None req-7ea9b2a3-83fb-43e1-b739-a7cde265bc49 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Lazy-loading 'flavor' on Instance uuid 275ef1ed-8e60-4151-b548-e22e5bd8efe2 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1127.743323] env[69328]: DEBUG oslo_vmware.api [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1127.743323] env[69328]: value = "task-3274125" [ 1127.743323] env[69328]: _type = "Task" [ 1127.743323] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.751678] env[69328]: DEBUG oslo_vmware.api [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3274125, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.816991] env[69328]: DEBUG nova.compute.utils [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1127.818549] env[69328]: DEBUG nova.compute.manager [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1127.818730] env[69328]: DEBUG nova.network.neutron [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1127.833120] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274124, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.884024] env[69328]: DEBUG nova.compute.manager [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Stashing vm_state: active {{(pid=69328) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1127.895396] env[69328]: DEBUG nova.policy [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aed0f81423aa4a24949ad1dc3cfdef2c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f50ac50ef6ae4abc83a8064746de7029', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 1128.185457] env[69328]: DEBUG nova.network.neutron [req-80536940-076d-4fea-a3e8-ae936cd1a6e3 req-c10c4640-6dc5-4aa7-ac9c-7d5d613379d1 service nova] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Updated VIF entry in instance network info cache for port 509b2377-84e7-48a6-b2ed-811f288cc65c. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1128.185825] env[69328]: DEBUG nova.network.neutron [req-80536940-076d-4fea-a3e8-ae936cd1a6e3 req-c10c4640-6dc5-4aa7-ac9c-7d5d613379d1 service nova] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Updating instance_info_cache with network_info: [{"id": "509b2377-84e7-48a6-b2ed-811f288cc65c", "address": "fa:16:3e:f8:6d:5c", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap509b2377-84", "ovs_interfaceid": "509b2377-84e7-48a6-b2ed-811f288cc65c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1128.197908] env[69328]: DEBUG nova.network.neutron [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Successfully created port: 19978029-822a-48e0-b3c1-9d885b82a5f3 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1128.261372] env[69328]: DEBUG oslo_vmware.api [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3274125, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.323054] env[69328]: DEBUG nova.compute.manager [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1128.336592] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274124, 'name': CreateVM_Task, 'duration_secs': 0.602038} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.336784] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1128.337917] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1128.338094] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1128.338433] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1128.338704] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-acc56b27-6cdd-4b1d-8c7d-598251416d7e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.346644] env[69328]: DEBUG oslo_vmware.api [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 1128.346644] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f34451-3c7a-2d3c-efa9-661874f37a36" [ 1128.346644] env[69328]: _type = "Task" [ 1128.346644] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.356216] env[69328]: DEBUG oslo_vmware.api [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f34451-3c7a-2d3c-efa9-661874f37a36, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.405956] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1128.481715] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "ff815ffb-3422-469e-9b54-b33502826513" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1128.482119] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "ff815ffb-3422-469e-9b54-b33502826513" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1128.610439] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ad7e66e-2dfc-4176-b082-4443b6e6be18 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.619455] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5714e1d8-2e64-4bb8-a299-641070af7b7a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.650604] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67a15aa1-dd33-47de-bf5b-181d69e9d315 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.658585] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dbe9f77-7f43-4afc-8980-949224319435 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.672413] env[69328]: DEBUG nova.compute.provider_tree [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1128.688675] env[69328]: DEBUG oslo_concurrency.lockutils [req-80536940-076d-4fea-a3e8-ae936cd1a6e3 req-c10c4640-6dc5-4aa7-ac9c-7d5d613379d1 service nova] Releasing lock "refresh_cache-33583ef3-252c-45d4-a514-5646f98c5f45" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1128.688948] env[69328]: DEBUG nova.compute.manager [req-80536940-076d-4fea-a3e8-ae936cd1a6e3 req-c10c4640-6dc5-4aa7-ac9c-7d5d613379d1 service nova] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Received event network-changed-9e189e9a-ecbf-475e-82a4-508c1a0aec74 {{(pid=69328) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1128.689133] env[69328]: DEBUG nova.compute.manager [req-80536940-076d-4fea-a3e8-ae936cd1a6e3 req-c10c4640-6dc5-4aa7-ac9c-7d5d613379d1 service nova] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Refreshing instance network info cache due to event network-changed-9e189e9a-ecbf-475e-82a4-508c1a0aec74. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1128.689347] env[69328]: DEBUG oslo_concurrency.lockutils [req-80536940-076d-4fea-a3e8-ae936cd1a6e3 req-c10c4640-6dc5-4aa7-ac9c-7d5d613379d1 service nova] Acquiring lock "refresh_cache-b0a1441c-81e2-4131-a2ff-f5042d559d9f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1128.689507] env[69328]: DEBUG oslo_concurrency.lockutils [req-80536940-076d-4fea-a3e8-ae936cd1a6e3 req-c10c4640-6dc5-4aa7-ac9c-7d5d613379d1 service nova] Acquired lock "refresh_cache-b0a1441c-81e2-4131-a2ff-f5042d559d9f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1128.689636] env[69328]: DEBUG nova.network.neutron [req-80536940-076d-4fea-a3e8-ae936cd1a6e3 req-c10c4640-6dc5-4aa7-ac9c-7d5d613379d1 service nova] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Refreshing network info cache for port 9e189e9a-ecbf-475e-82a4-508c1a0aec74 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1128.753849] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7ea9b2a3-83fb-43e1-b739-a7cde265bc49 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Lock "275ef1ed-8e60-4151-b548-e22e5bd8efe2" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.283s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1128.754938] env[69328]: DEBUG oslo_vmware.api [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3274125, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.932588} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.755379] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9/32b9acbc-35a0-4d67-ac74-ef46c45fa0b9.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1128.755600] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1128.755841] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e32d51ef-3d79-424b-b4b0-a9159ef2ee44 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.762764] env[69328]: DEBUG oslo_vmware.api [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1128.762764] env[69328]: value = "task-3274126" [ 1128.762764] env[69328]: _type = "Task" [ 1128.762764] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.772840] env[69328]: DEBUG oslo_vmware.api [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3274126, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.859820] env[69328]: DEBUG oslo_vmware.api [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f34451-3c7a-2d3c-efa9-661874f37a36, 'name': SearchDatastore_Task, 'duration_secs': 0.05706} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.860114] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1128.860372] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1128.860634] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1128.860785] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1128.861113] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1128.861454] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1c3a0410-3d84-4ea2-aa89-42b09ec044dd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.873653] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1128.873870] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1128.875167] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7aeed115-5c3d-44cb-bb66-5debae78bc8c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.881500] env[69328]: DEBUG oslo_vmware.api [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 1128.881500] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52bdcf1d-a8ca-3346-1bb0-21e16be9596c" [ 1128.881500] env[69328]: _type = "Task" [ 1128.881500] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.889717] env[69328]: DEBUG oslo_vmware.api [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52bdcf1d-a8ca-3346-1bb0-21e16be9596c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.984945] env[69328]: DEBUG nova.compute.manager [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1129.176202] env[69328]: DEBUG nova.scheduler.client.report [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1129.273891] env[69328]: DEBUG oslo_vmware.api [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3274126, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.117339} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.274338] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1129.275203] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50ca9829-a7dd-4f1a-9d0f-37bf934dae12 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.298962] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9/32b9acbc-35a0-4d67-ac74-ef46c45fa0b9.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1129.301569] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ad148ca6-1f9a-4fec-946e-638a361cb02e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.325171] env[69328]: DEBUG oslo_vmware.api [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1129.325171] env[69328]: value = "task-3274127" [ 1129.325171] env[69328]: _type = "Task" [ 1129.325171] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.333047] env[69328]: DEBUG oslo_vmware.api [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3274127, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.337025] env[69328]: DEBUG nova.compute.manager [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1129.359861] env[69328]: DEBUG nova.virt.hardware [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1129.360916] env[69328]: DEBUG nova.virt.hardware [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1129.361359] env[69328]: DEBUG nova.virt.hardware [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1129.361770] env[69328]: DEBUG nova.virt.hardware [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1129.362130] env[69328]: DEBUG nova.virt.hardware [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1129.362450] env[69328]: DEBUG nova.virt.hardware [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1129.362820] env[69328]: DEBUG nova.virt.hardware [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1129.363185] env[69328]: DEBUG nova.virt.hardware [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1129.363524] env[69328]: DEBUG nova.virt.hardware [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1129.363861] env[69328]: DEBUG nova.virt.hardware [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1129.365018] env[69328]: DEBUG nova.virt.hardware [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1129.365349] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54ec5145-86b4-4d29-a915-657848ccdf02 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.374588] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-063a4008-d899-486e-bad0-1d98db985652 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.399444] env[69328]: DEBUG oslo_vmware.api [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52bdcf1d-a8ca-3346-1bb0-21e16be9596c, 'name': SearchDatastore_Task, 'duration_secs': 0.023941} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.400226] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f804351a-3c1e-4bf3-bd45-62cbfddc4795 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.406329] env[69328]: DEBUG oslo_vmware.api [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 1129.406329] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52886286-1cad-e53c-24cf-3bd09efd0a91" [ 1129.406329] env[69328]: _type = "Task" [ 1129.406329] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.414875] env[69328]: DEBUG oslo_vmware.api [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52886286-1cad-e53c-24cf-3bd09efd0a91, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.511075] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1129.533535] env[69328]: DEBUG nova.network.neutron [req-80536940-076d-4fea-a3e8-ae936cd1a6e3 req-c10c4640-6dc5-4aa7-ac9c-7d5d613379d1 service nova] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Updated VIF entry in instance network info cache for port 9e189e9a-ecbf-475e-82a4-508c1a0aec74. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1129.534011] env[69328]: DEBUG nova.network.neutron [req-80536940-076d-4fea-a3e8-ae936cd1a6e3 req-c10c4640-6dc5-4aa7-ac9c-7d5d613379d1 service nova] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Updating instance_info_cache with network_info: [{"id": "9e189e9a-ecbf-475e-82a4-508c1a0aec74", "address": "fa:16:3e:40:7c:9a", "network": {"id": "ce3bec03-01f9-4ac9-80e4-bfb944c0bb66", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-545997027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87581f423dc64e4fb9fe1d51ebc68597", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e189e9a-ec", "ovs_interfaceid": "9e189e9a-ecbf-475e-82a4-508c1a0aec74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1129.610768] env[69328]: DEBUG nova.compute.manager [req-40e16b17-3528-45d5-8b42-2fb755d6815e req-f7092ec8-9be6-4a94-b454-6df191b1773b service nova] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Received event network-vif-plugged-19978029-822a-48e0-b3c1-9d885b82a5f3 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1129.613227] env[69328]: DEBUG oslo_concurrency.lockutils [req-40e16b17-3528-45d5-8b42-2fb755d6815e req-f7092ec8-9be6-4a94-b454-6df191b1773b service nova] Acquiring lock "ae46c18e-15ae-4a47-b05a-a143f10b5ab6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1129.613636] env[69328]: DEBUG oslo_concurrency.lockutils [req-40e16b17-3528-45d5-8b42-2fb755d6815e req-f7092ec8-9be6-4a94-b454-6df191b1773b service nova] Lock "ae46c18e-15ae-4a47-b05a-a143f10b5ab6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.003s {{(pid=69328) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1129.613895] env[69328]: DEBUG oslo_concurrency.lockutils [req-40e16b17-3528-45d5-8b42-2fb755d6815e req-f7092ec8-9be6-4a94-b454-6df191b1773b service nova] Lock "ae46c18e-15ae-4a47-b05a-a143f10b5ab6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1129.614210] env[69328]: DEBUG nova.compute.manager [req-40e16b17-3528-45d5-8b42-2fb755d6815e req-f7092ec8-9be6-4a94-b454-6df191b1773b service nova] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] No waiting events found dispatching network-vif-plugged-19978029-822a-48e0-b3c1-9d885b82a5f3 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1129.614447] env[69328]: WARNING nova.compute.manager [req-40e16b17-3528-45d5-8b42-2fb755d6815e req-f7092ec8-9be6-4a94-b454-6df191b1773b service nova] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Received unexpected event network-vif-plugged-19978029-822a-48e0-b3c1-9d885b82a5f3 for instance with vm_state building and task_state spawning. [ 1129.682016] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.374s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1129.683177] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 1.277s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1129.704580] env[69328]: DEBUG nova.network.neutron [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Successfully updated port: 19978029-822a-48e0-b3c1-9d885b82a5f3 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1129.833804] env[69328]: DEBUG oslo_vmware.api [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3274127, 'name': ReconfigVM_Task, 'duration_secs': 0.355656} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.834132] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Reconfigured VM instance instance-0000006f to attach disk [datastore1] 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9/32b9acbc-35a0-4d67-ac74-ef46c45fa0b9.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1129.834759] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fba0dfe0-bfb6-42e9-97ce-ad6c4bded3e1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.842436] env[69328]: DEBUG oslo_vmware.api [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1129.842436] env[69328]: value = "task-3274128" [ 1129.842436] env[69328]: _type = "Task" [ 1129.842436] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.853564] env[69328]: DEBUG oslo_vmware.api [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3274128, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.916622] env[69328]: DEBUG oslo_vmware.api [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52886286-1cad-e53c-24cf-3bd09efd0a91, 'name': SearchDatastore_Task, 'duration_secs': 0.018613} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.916874] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1129.917152] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 33583ef3-252c-45d4-a514-5646f98c5f45/33583ef3-252c-45d4-a514-5646f98c5f45.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1129.917410] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0f12f31c-a937-4bb2-9fe5-2241b15cd4a3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.924547] env[69328]: DEBUG oslo_vmware.api [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 1129.924547] env[69328]: value = "task-3274129" [ 1129.924547] env[69328]: _type = "Task" [ 1129.924547] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.933572] env[69328]: DEBUG oslo_vmware.api [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274129, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.036920] env[69328]: DEBUG oslo_concurrency.lockutils [req-80536940-076d-4fea-a3e8-ae936cd1a6e3 req-c10c4640-6dc5-4aa7-ac9c-7d5d613379d1 service nova] Releasing lock "refresh_cache-b0a1441c-81e2-4131-a2ff-f5042d559d9f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1130.036920] env[69328]: DEBUG nova.compute.manager [req-80536940-076d-4fea-a3e8-ae936cd1a6e3 req-c10c4640-6dc5-4aa7-ac9c-7d5d613379d1 service nova] [instance: ee3609ea-0855-47c2-874c-349c80419781] Received event network-changed-ce79bad7-6bfd-4645-bc55-71dfc049411d {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1130.037107] env[69328]: DEBUG nova.compute.manager [req-80536940-076d-4fea-a3e8-ae936cd1a6e3 req-c10c4640-6dc5-4aa7-ac9c-7d5d613379d1 service nova] [instance: ee3609ea-0855-47c2-874c-349c80419781] Refreshing instance network info cache due to event network-changed-ce79bad7-6bfd-4645-bc55-71dfc049411d. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1130.037325] env[69328]: DEBUG oslo_concurrency.lockutils [req-80536940-076d-4fea-a3e8-ae936cd1a6e3 req-c10c4640-6dc5-4aa7-ac9c-7d5d613379d1 service nova] Acquiring lock "refresh_cache-ee3609ea-0855-47c2-874c-349c80419781" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1130.037474] env[69328]: DEBUG oslo_concurrency.lockutils [req-80536940-076d-4fea-a3e8-ae936cd1a6e3 req-c10c4640-6dc5-4aa7-ac9c-7d5d613379d1 service nova] Acquired lock "refresh_cache-ee3609ea-0855-47c2-874c-349c80419781" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1130.037635] env[69328]: DEBUG nova.network.neutron [req-80536940-076d-4fea-a3e8-ae936cd1a6e3 req-c10c4640-6dc5-4aa7-ac9c-7d5d613379d1 service nova] [instance: ee3609ea-0855-47c2-874c-349c80419781] Refreshing network info cache for port ce79bad7-6bfd-4645-bc55-71dfc049411d {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1130.185808] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Acquiring lock "37b4c6e0-f56b-4ea3-a936-e576a91efe63" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1130.186093] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Lock "37b4c6e0-f56b-4ea3-a936-e576a91efe63" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1130.190635] env[69328]: INFO nova.compute.claims [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1130.195339] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Lock "37b4c6e0-f56b-4ea3-a936-e576a91efe63" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.009s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1130.195776] env[69328]: DEBUG nova.compute.manager [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1130.201743] env[69328]: DEBUG oslo_concurrency.lockutils [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Acquiring lock "03f0adc8-d640-4248-be9d-ab4ba0cbe760" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1130.202038] env[69328]: DEBUG oslo_concurrency.lockutils [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Lock "03f0adc8-d640-4248-be9d-ab4ba0cbe760" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1130.207064] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquiring lock "refresh_cache-ae46c18e-15ae-4a47-b05a-a143f10b5ab6" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1130.207213] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquired lock "refresh_cache-ae46c18e-15ae-4a47-b05a-a143f10b5ab6" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1130.207359] env[69328]: DEBUG nova.network.neutron [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1130.355842] env[69328]: DEBUG oslo_vmware.api [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3274128, 'name': Rename_Task, 'duration_secs': 0.14884} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.356272] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1130.356919] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-11be99fa-57bd-42fa-8431-187e359f8f9f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.364944] env[69328]: DEBUG oslo_vmware.api [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1130.364944] env[69328]: value = "task-3274130" [ 1130.364944] env[69328]: _type = "Task" [ 1130.364944] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.377021] env[69328]: DEBUG oslo_vmware.api [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3274130, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.436921] env[69328]: DEBUG oslo_vmware.api [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274129, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.697681] env[69328]: INFO nova.compute.resource_tracker [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Updating resource usage from migration c9d84624-27a9-4e50-abe2-112cbefbaf04 [ 1130.705476] env[69328]: DEBUG nova.compute.utils [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1130.707395] env[69328]: DEBUG nova.compute.manager [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1130.710870] env[69328]: DEBUG nova.compute.manager [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1130.710870] env[69328]: DEBUG nova.network.neutron [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1130.762131] env[69328]: DEBUG nova.policy [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f1eeab0e2b9142dca8bd729045d6959b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '966fcf0bab1e4c4fbcd9055a118263aa', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 1130.771887] env[69328]: DEBUG nova.network.neutron [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1130.856662] env[69328]: DEBUG nova.network.neutron [req-80536940-076d-4fea-a3e8-ae936cd1a6e3 req-c10c4640-6dc5-4aa7-ac9c-7d5d613379d1 service nova] [instance: ee3609ea-0855-47c2-874c-349c80419781] Updated VIF entry in instance network info cache for port ce79bad7-6bfd-4645-bc55-71dfc049411d. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1130.857350] env[69328]: DEBUG nova.network.neutron [req-80536940-076d-4fea-a3e8-ae936cd1a6e3 req-c10c4640-6dc5-4aa7-ac9c-7d5d613379d1 service nova] [instance: ee3609ea-0855-47c2-874c-349c80419781] Updating instance_info_cache with network_info: [{"id": "ce79bad7-6bfd-4645-bc55-71dfc049411d", "address": "fa:16:3e:c6:38:a6", "network": {"id": "ce3bec03-01f9-4ac9-80e4-bfb944c0bb66", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-545997027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.234", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87581f423dc64e4fb9fe1d51ebc68597", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce79bad7-6b", "ovs_interfaceid": "ce79bad7-6bfd-4645-bc55-71dfc049411d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1130.882879] env[69328]: DEBUG oslo_vmware.api [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3274130, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.937623] env[69328]: DEBUG oslo_vmware.api [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274129, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.790513} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.938064] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 33583ef3-252c-45d4-a514-5646f98c5f45/33583ef3-252c-45d4-a514-5646f98c5f45.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1130.938181] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1130.938422] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-40573f93-a6dc-48d1-a562-fba9cd2d640b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.946823] env[69328]: DEBUG oslo_vmware.api [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 1130.946823] env[69328]: value = "task-3274131" [ 1130.946823] env[69328]: _type = "Task" [ 1130.946823] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.951520] env[69328]: DEBUG nova.network.neutron [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Updating instance_info_cache with network_info: [{"id": "19978029-822a-48e0-b3c1-9d885b82a5f3", "address": "fa:16:3e:99:f9:c3", "network": {"id": "3be6b159-5d5c-4752-b79d-0ed6110569d0", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-915151552-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f50ac50ef6ae4abc83a8064746de7029", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19978029-82", "ovs_interfaceid": "19978029-822a-48e0-b3c1-9d885b82a5f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1130.965228] env[69328]: DEBUG oslo_vmware.api [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] 
Task: {'id': task-3274131, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.021644] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf28f199-af0e-461e-a9c1-49861e573ef0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.030384] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa63b709-d523-431b-97bd-ce194d27767d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.062293] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29741517-cc91-47de-8a11-aac4083f24b5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.070783] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8ced7a1-3e0e-4070-b247-f9f3d794ecdd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.087552] env[69328]: DEBUG nova.compute.provider_tree [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1131.131303] env[69328]: DEBUG nova.network.neutron [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Successfully created port: c23855e3-70d4-4725-b057-d2d1f6f1d80e {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1131.223422] env[69328]: DEBUG nova.compute.manager [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1131.238304] env[69328]: DEBUG oslo_concurrency.lockutils [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1131.359668] env[69328]: DEBUG oslo_concurrency.lockutils [req-80536940-076d-4fea-a3e8-ae936cd1a6e3 req-c10c4640-6dc5-4aa7-ac9c-7d5d613379d1 service nova] Releasing lock "refresh_cache-ee3609ea-0855-47c2-874c-349c80419781" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1131.376880] env[69328]: DEBUG oslo_vmware.api [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3274130, 'name': PowerOnVM_Task, 'duration_secs': 0.630918} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.377152] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1131.377357] env[69328]: INFO nova.compute.manager [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Took 8.83 seconds to spawn the instance on the hypervisor. [ 1131.377533] env[69328]: DEBUG nova.compute.manager [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1131.378760] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94a7a1da-0589-4f1d-b965-560a9a690d8e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.457992] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Releasing lock "refresh_cache-ae46c18e-15ae-4a47-b05a-a143f10b5ab6" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1131.457992] env[69328]: DEBUG nova.compute.manager [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Instance network_info: |[{"id": "19978029-822a-48e0-b3c1-9d885b82a5f3", "address": "fa:16:3e:99:f9:c3", "network": {"id": "3be6b159-5d5c-4752-b79d-0ed6110569d0", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-915151552-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f50ac50ef6ae4abc83a8064746de7029", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19978029-82", "ovs_interfaceid": "19978029-822a-48e0-b3c1-9d885b82a5f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1131.458285] env[69328]: DEBUG oslo_vmware.api [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274131, 'name': ExtendVirtualDisk_Task, 'duration_secs': 
0.290892} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.459801] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:99:f9:c3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fe20ef0e-0991-44d7-887d-08dddac0b56b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '19978029-822a-48e0-b3c1-9d885b82a5f3', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1131.465962] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1131.466208] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1131.466566] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1131.467286] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae507552-fef9-4988-a138-fbde5f93b362 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.470121] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5e18a393-dc79-4f19-98e4-18b74cb0a9b5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.505217] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] 33583ef3-252c-45d4-a514-5646f98c5f45/33583ef3-252c-45d4-a514-5646f98c5f45.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1131.506765] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9c0dcb49-3234-4586-b99e-0e83d5cdca2a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.520761] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1131.520761] env[69328]: value = "task-3274132" [ 1131.520761] env[69328]: _type = "Task" [ 1131.520761] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.530871] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274132, 'name': CreateVM_Task} progress is 15%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.532248] env[69328]: DEBUG oslo_vmware.api [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 1131.532248] env[69328]: value = "task-3274133" [ 1131.532248] env[69328]: _type = "Task" [ 1131.532248] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.540190] env[69328]: DEBUG oslo_vmware.api [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274133, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.590992] env[69328]: DEBUG nova.scheduler.client.report [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1131.642015] env[69328]: DEBUG nova.compute.manager [req-80527f6b-33e3-4b4e-b1e7-53c58b7f1aaa req-348eba82-b26b-48df-85b7-3132fb01feca service nova] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Received event network-changed-19978029-822a-48e0-b3c1-9d885b82a5f3 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1131.642217] env[69328]: DEBUG nova.compute.manager [req-80527f6b-33e3-4b4e-b1e7-53c58b7f1aaa req-348eba82-b26b-48df-85b7-3132fb01feca service nova] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Refreshing instance network info cache due to event network-changed-19978029-822a-48e0-b3c1-9d885b82a5f3. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1131.642442] env[69328]: DEBUG oslo_concurrency.lockutils [req-80527f6b-33e3-4b4e-b1e7-53c58b7f1aaa req-348eba82-b26b-48df-85b7-3132fb01feca service nova] Acquiring lock "refresh_cache-ae46c18e-15ae-4a47-b05a-a143f10b5ab6" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1131.642750] env[69328]: DEBUG oslo_concurrency.lockutils [req-80527f6b-33e3-4b4e-b1e7-53c58b7f1aaa req-348eba82-b26b-48df-85b7-3132fb01feca service nova] Acquired lock "refresh_cache-ae46c18e-15ae-4a47-b05a-a143f10b5ab6" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1131.642750] env[69328]: DEBUG nova.network.neutron [req-80527f6b-33e3-4b4e-b1e7-53c58b7f1aaa req-348eba82-b26b-48df-85b7-3132fb01feca service nova] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Refreshing network info cache for port 19978029-822a-48e0-b3c1-9d885b82a5f3 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1131.897549] env[69328]: INFO nova.compute.manager [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Took 14.63 seconds to build instance. [ 1132.031948] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274132, 'name': CreateVM_Task, 'duration_secs': 0.460351} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.031948] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1132.032658] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1132.032769] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1132.033142] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1132.033452] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b99cb25-e313-418d-a9ed-c170e290b18e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.041901] env[69328]: DEBUG oslo_vmware.api [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 
tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 1132.041901] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5242b4c6-0099-a6ec-c381-3897a0d310d4" [ 1132.041901] env[69328]: _type = "Task" [ 1132.041901] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.044882] env[69328]: DEBUG oslo_vmware.api [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274133, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.054793] env[69328]: DEBUG oslo_vmware.api [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5242b4c6-0099-a6ec-c381-3897a0d310d4, 'name': SearchDatastore_Task, 'duration_secs': 0.010942} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.055133] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1132.055382] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1132.055623] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1132.055770] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1132.055948] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1132.056273] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-af8059c5-37c9-4697-8a00-9797e2ccdc40 {{(pid=69328) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.067991] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1132.068665] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1132.069262] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25a3d2bb-6de3-4d23-b740-1d6478911b2f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.075380] env[69328]: DEBUG oslo_vmware.api [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 1132.075380] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52818725-79cf-bfa9-a4a2-e1d548f8ada4" [ 1132.075380] env[69328]: _type = "Task" [ 1132.075380] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.084009] env[69328]: DEBUG oslo_vmware.api [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52818725-79cf-bfa9-a4a2-e1d548f8ada4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.096384] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.413s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1132.096384] env[69328]: INFO nova.compute.manager [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Migrating [ 1132.105104] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.594s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1132.106972] env[69328]: INFO nova.compute.claims [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1132.236151] env[69328]: DEBUG nova.compute.manager [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1132.263733] env[69328]: DEBUG nova.virt.hardware [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1132.263984] env[69328]: DEBUG nova.virt.hardware [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1132.264215] env[69328]: DEBUG nova.virt.hardware [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1132.264419] env[69328]: DEBUG nova.virt.hardware [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1132.264570] env[69328]: DEBUG nova.virt.hardware [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1132.264715] env[69328]: DEBUG nova.virt.hardware [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1132.264923] env[69328]: DEBUG nova.virt.hardware [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1132.265125] env[69328]: DEBUG nova.virt.hardware [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1132.265324] env[69328]: DEBUG nova.virt.hardware [None 
req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1132.265489] env[69328]: DEBUG nova.virt.hardware [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1132.265660] env[69328]: DEBUG nova.virt.hardware [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1132.266538] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cef4e5bc-f2ac-4924-bae6-b00801ac7175 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.276688] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5ae4eee-2066-4746-9007-9d44ecbea710 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.367500] env[69328]: DEBUG nova.network.neutron [req-80527f6b-33e3-4b4e-b1e7-53c58b7f1aaa req-348eba82-b26b-48df-85b7-3132fb01feca service nova] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Updated VIF entry in instance network info cache for port 19978029-822a-48e0-b3c1-9d885b82a5f3. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1132.367875] env[69328]: DEBUG nova.network.neutron [req-80527f6b-33e3-4b4e-b1e7-53c58b7f1aaa req-348eba82-b26b-48df-85b7-3132fb01feca service nova] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Updating instance_info_cache with network_info: [{"id": "19978029-822a-48e0-b3c1-9d885b82a5f3", "address": "fa:16:3e:99:f9:c3", "network": {"id": "3be6b159-5d5c-4752-b79d-0ed6110569d0", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-915151552-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f50ac50ef6ae4abc83a8064746de7029", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19978029-82", "ovs_interfaceid": "19978029-822a-48e0-b3c1-9d885b82a5f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1132.400347] env[69328]: DEBUG oslo_concurrency.lockutils [None req-783887b1-e932-4ced-b1a5-731a7cc8e835 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "32b9acbc-35a0-4d67-ac74-ef46c45fa0b9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.147s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1132.547467] env[69328]: DEBUG oslo_vmware.api [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274133, 'name': ReconfigVM_Task, 'duration_secs': 0.891103} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.547467] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Reconfigured VM instance instance-00000070 to attach disk [datastore1] 33583ef3-252c-45d4-a514-5646f98c5f45/33583ef3-252c-45d4-a514-5646f98c5f45.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1132.547467] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-30263aa4-24f4-4dcd-be9f-243007ba9554 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.555022] env[69328]: DEBUG oslo_vmware.api [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 1132.555022] env[69328]: value = "task-3274134" [ 1132.555022] env[69328]: _type = "Task" [ 1132.555022] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.564586] env[69328]: DEBUG oslo_vmware.api [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274134, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.587794] env[69328]: DEBUG oslo_vmware.api [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52818725-79cf-bfa9-a4a2-e1d548f8ada4, 'name': SearchDatastore_Task, 'duration_secs': 0.011433} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.589254] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-674a4d39-a946-4f8d-8ad0-dd090da7235c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.597255] env[69328]: DEBUG oslo_vmware.api [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 1132.597255] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d94abd-d7c3-8ec4-86d2-973220262648" [ 1132.597255] env[69328]: _type = "Task" [ 1132.597255] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.606687] env[69328]: DEBUG oslo_vmware.api [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d94abd-d7c3-8ec4-86d2-973220262648, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.610958] env[69328]: DEBUG nova.compute.manager [req-29af204e-b639-4f2a-bd2e-3939af9121ec req-c6c60b71-93da-41ba-8293-f1cb25501ebc service nova] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Received event network-vif-plugged-c23855e3-70d4-4725-b057-d2d1f6f1d80e {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1132.610958] env[69328]: DEBUG oslo_concurrency.lockutils [req-29af204e-b639-4f2a-bd2e-3939af9121ec req-c6c60b71-93da-41ba-8293-f1cb25501ebc service nova] Acquiring lock "79d66d5d-e1a4-4bc0-8e43-db97153867e3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1132.610958] env[69328]: DEBUG oslo_concurrency.lockutils [req-29af204e-b639-4f2a-bd2e-3939af9121ec req-c6c60b71-93da-41ba-8293-f1cb25501ebc service nova] Lock "79d66d5d-e1a4-4bc0-8e43-db97153867e3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1132.610958] env[69328]: DEBUG oslo_concurrency.lockutils [req-29af204e-b639-4f2a-bd2e-3939af9121ec req-c6c60b71-93da-41ba-8293-f1cb25501ebc service nova] Lock "79d66d5d-e1a4-4bc0-8e43-db97153867e3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1132.610958] env[69328]: DEBUG nova.compute.manager [req-29af204e-b639-4f2a-bd2e-3939af9121ec req-c6c60b71-93da-41ba-8293-f1cb25501ebc service nova] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] No waiting events found dispatching network-vif-plugged-c23855e3-70d4-4725-b057-d2d1f6f1d80e {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1132.610958] env[69328]: WARNING nova.compute.manager [req-29af204e-b639-4f2a-bd2e-3939af9121ec req-c6c60b71-93da-41ba-8293-f1cb25501ebc service nova] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Received unexpected event network-vif-plugged-c23855e3-70d4-4725-b057-d2d1f6f1d80e for instance with vm_state building and task_state spawning. 
[ 1132.618915] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "refresh_cache-ee3609ea-0855-47c2-874c-349c80419781" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1132.618915] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquired lock "refresh_cache-ee3609ea-0855-47c2-874c-349c80419781" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1132.618915] env[69328]: DEBUG nova.network.neutron [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1132.631660] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1132.632244] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1132.632641] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1132.632806] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1132.632956] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1132.633150] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1132.633354] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1132.633493] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69328) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1132.633640] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager.update_available_resource {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1132.696951] env[69328]: DEBUG nova.network.neutron [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Successfully updated port: c23855e3-70d4-4725-b057-d2d1f6f1d80e {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1132.870862] env[69328]: DEBUG oslo_concurrency.lockutils [req-80527f6b-33e3-4b4e-b1e7-53c58b7f1aaa req-348eba82-b26b-48df-85b7-3132fb01feca service nova] Releasing lock "refresh_cache-ae46c18e-15ae-4a47-b05a-a143f10b5ab6" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1133.066776] env[69328]: DEBUG oslo_vmware.api [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274134, 'name': Rename_Task, 'duration_secs': 0.156225} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.067179] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1133.067506] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bfed446f-8322-45cb-ac7d-1f8319a6bab2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.075736] env[69328]: DEBUG oslo_vmware.api [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 1133.075736] env[69328]: value = "task-3274135" [ 1133.075736] env[69328]: _type = "Task" [ 1133.075736] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.083889] env[69328]: DEBUG oslo_vmware.api [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274135, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.104721] env[69328]: DEBUG oslo_vmware.api [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d94abd-d7c3-8ec4-86d2-973220262648, 'name': SearchDatastore_Task, 'duration_secs': 0.010887} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.104964] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1133.105245] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] ae46c18e-15ae-4a47-b05a-a143f10b5ab6/ae46c18e-15ae-4a47-b05a-a143f10b5ab6.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1133.105494] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3abd5cfe-c69e-4bde-bd8c-328486465092 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.111872] env[69328]: DEBUG oslo_vmware.api [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 1133.111872] env[69328]: value = "task-3274136" [ 1133.111872] env[69328]: _type = "Task" [ 1133.111872] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.122265] env[69328]: DEBUG oslo_vmware.api [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274136, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.137155] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1133.201135] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Acquiring lock "refresh_cache-79d66d5d-e1a4-4bc0-8e43-db97153867e3" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1133.201289] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Acquired lock "refresh_cache-79d66d5d-e1a4-4bc0-8e43-db97153867e3" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1133.201441] env[69328]: DEBUG nova.network.neutron [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1133.370709] env[69328]: DEBUG nova.network.neutron [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Updating instance_info_cache with network_info: [{"id": "ce79bad7-6bfd-4645-bc55-71dfc049411d", "address": "fa:16:3e:c6:38:a6", "network": {"id": "ce3bec03-01f9-4ac9-80e4-bfb944c0bb66", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-545997027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.234", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87581f423dc64e4fb9fe1d51ebc68597", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce79bad7-6b", "ovs_interfaceid": "ce79bad7-6bfd-4645-bc55-71dfc049411d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1133.402155] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60b0f15f-f25e-4631-bbf6-0d247e13ebdf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.413353] env[69328]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5833cf56-f9e4-4ad5-80ca-745fd3e51fc5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.446648] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b4f3a55d-4bfd-43f9-b253-d4bc8c6001e6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "32b9acbc-35a0-4d67-ac74-ef46c45fa0b9" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1133.446924] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b4f3a55d-4bfd-43f9-b253-d4bc8c6001e6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "32b9acbc-35a0-4d67-ac74-ef46c45fa0b9" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1133.447151] env[69328]: DEBUG nova.compute.manager [None req-b4f3a55d-4bfd-43f9-b253-d4bc8c6001e6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1133.448241] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfecc5c1-dee0-46ad-a5f5-550bf5379069 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.451360] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfbea9f1-5dce-4009-95db-3bbea793e88d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.459486] env[69328]: DEBUG nova.compute.manager [None req-b4f3a55d-4bfd-43f9-b253-d4bc8c6001e6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69328) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1133.460116] env[69328]: DEBUG nova.objects.instance [None req-b4f3a55d-4bfd-43f9-b253-d4bc8c6001e6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lazy-loading 'flavor' on Instance uuid 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1133.464912] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8402d50-63fe-4020-835e-b303b35f8d04 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.482222] env[69328]: DEBUG nova.compute.provider_tree [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1133.588713] env[69328]: DEBUG oslo_vmware.api [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 
tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274135, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.624488] env[69328]: DEBUG oslo_vmware.api [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274136, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.737220] env[69328]: DEBUG nova.network.neutron [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1133.876718] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Releasing lock "refresh_cache-ee3609ea-0855-47c2-874c-349c80419781" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1133.893598] env[69328]: DEBUG nova.network.neutron [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Updating instance_info_cache with network_info: [{"id": "c23855e3-70d4-4725-b057-d2d1f6f1d80e", "address": "fa:16:3e:3d:8d:aa", "network": {"id": "28fe61fa-3278-4402-b132-c0f76c8b360b", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-2079882223-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "966fcf0bab1e4c4fbcd9055a118263aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61a172ee-af3f-473e-b12a-3fee5bf39c8d", "external-id": "nsx-vlan-transportzone-997", "segmentation_id": 997, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc23855e3-70", "ovs_interfaceid": "c23855e3-70d4-4725-b057-d2d1f6f1d80e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1133.985531] env[69328]: DEBUG nova.scheduler.client.report [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1134.087288] env[69328]: DEBUG oslo_vmware.api [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274135, 'name': PowerOnVM_Task, 'duration_secs': 0.629431} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.087618] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1134.087776] env[69328]: INFO nova.compute.manager [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Took 9.15 seconds to spawn the instance on the hypervisor. [ 1134.087953] env[69328]: DEBUG nova.compute.manager [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1134.088762] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04389043-cc8b-47b6-96ff-bbf540bee88c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.123826] env[69328]: DEBUG oslo_vmware.api [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274136, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.568933} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.124158] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] ae46c18e-15ae-4a47-b05a-a143f10b5ab6/ae46c18e-15ae-4a47-b05a-a143f10b5ab6.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1134.124399] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1134.125150] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-afb9a1a7-8732-4e7e-bf61-94b9b0de82a3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.133890] env[69328]: DEBUG oslo_vmware.api [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 1134.133890] env[69328]: value = "task-3274137" [ 1134.133890] env[69328]: _type = "Task" [ 1134.133890] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.144493] env[69328]: DEBUG oslo_vmware.api [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274137, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.396452] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Releasing lock "refresh_cache-79d66d5d-e1a4-4bc0-8e43-db97153867e3" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1134.397134] env[69328]: DEBUG nova.compute.manager [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Instance network_info: |[{"id": "c23855e3-70d4-4725-b057-d2d1f6f1d80e", "address": "fa:16:3e:3d:8d:aa", "network": {"id": "28fe61fa-3278-4402-b132-c0f76c8b360b", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-2079882223-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "966fcf0bab1e4c4fbcd9055a118263aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61a172ee-af3f-473e-b12a-3fee5bf39c8d", "external-id": "nsx-vlan-transportzone-997", "segmentation_id": 997, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc23855e3-70", "ovs_interfaceid": "c23855e3-70d4-4725-b057-d2d1f6f1d80e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1134.397602] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3d:8d:aa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '61a172ee-af3f-473e-b12a-3fee5bf39c8d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c23855e3-70d4-4725-b057-d2d1f6f1d80e', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1134.405264] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Creating folder: Project (966fcf0bab1e4c4fbcd9055a118263aa). Parent ref: group-v653649. 
{{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1134.405555] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-90d21d28-008a-4852-9275-c564049fa0b0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.419616] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Created folder: Project (966fcf0bab1e4c4fbcd9055a118263aa) in parent group-v653649. [ 1134.419806] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Creating folder: Instances. Parent ref: group-v653952. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1134.420829] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0ea4edb0-7a29-4580-b56e-d4e025e7eb0e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.431120] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Created folder: Instances in parent group-v653952. [ 1134.431352] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1134.431554] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1134.431761] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0efd5122-467f-4b1f-99c7-c40416f4bc89 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.450978] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1134.450978] env[69328]: value = "task-3274140" [ 1134.450978] env[69328]: _type = "Task" [ 1134.450978] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.458907] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274140, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.467881] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4f3a55d-4bfd-43f9-b253-d4bc8c6001e6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1134.468163] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f9172b70-5c37-491b-8e28-55f1d33badce {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.474496] env[69328]: DEBUG oslo_vmware.api [None req-b4f3a55d-4bfd-43f9-b253-d4bc8c6001e6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1134.474496] env[69328]: value = "task-3274141" [ 1134.474496] env[69328]: _type = "Task" [ 1134.474496] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.483526] env[69328]: DEBUG oslo_vmware.api [None req-b4f3a55d-4bfd-43f9-b253-d4bc8c6001e6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3274141, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.490937] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.386s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1134.491819] env[69328]: DEBUG nova.compute.manager [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1134.495959] env[69328]: DEBUG oslo_concurrency.lockutils [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.258s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1134.498207] env[69328]: INFO nova.compute.claims [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1134.610838] env[69328]: INFO nova.compute.manager [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Took 14.92 seconds to build instance. 
[ 1134.641316] env[69328]: DEBUG nova.compute.manager [req-34491607-98f5-4f21-b66b-ba5dea82620a req-90077c8a-4869-43ad-9a16-0de1ceeca23d service nova] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Received event network-changed-c23855e3-70d4-4725-b057-d2d1f6f1d80e {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1134.641545] env[69328]: DEBUG nova.compute.manager [req-34491607-98f5-4f21-b66b-ba5dea82620a req-90077c8a-4869-43ad-9a16-0de1ceeca23d service nova] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Refreshing instance network info cache due to event network-changed-c23855e3-70d4-4725-b057-d2d1f6f1d80e. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1134.641879] env[69328]: DEBUG oslo_concurrency.lockutils [req-34491607-98f5-4f21-b66b-ba5dea82620a req-90077c8a-4869-43ad-9a16-0de1ceeca23d service nova] Acquiring lock "refresh_cache-79d66d5d-e1a4-4bc0-8e43-db97153867e3" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1134.642216] env[69328]: DEBUG oslo_concurrency.lockutils [req-34491607-98f5-4f21-b66b-ba5dea82620a req-90077c8a-4869-43ad-9a16-0de1ceeca23d service nova] Acquired lock "refresh_cache-79d66d5d-e1a4-4bc0-8e43-db97153867e3" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1134.642463] env[69328]: DEBUG nova.network.neutron [req-34491607-98f5-4f21-b66b-ba5dea82620a req-90077c8a-4869-43ad-9a16-0de1ceeca23d service nova] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Refreshing network info cache for port c23855e3-70d4-4725-b057-d2d1f6f1d80e {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1134.650232] env[69328]: DEBUG oslo_vmware.api [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274137, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068612} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.650497] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1134.651280] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26aee44e-e169-4268-80b6-b12dd7773df0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.674616] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] ae46c18e-15ae-4a47-b05a-a143f10b5ab6/ae46c18e-15ae-4a47-b05a-a143f10b5ab6.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1134.675364] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8bbe3fed-b8d7-4969-a552-af16b0026866 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.695890] env[69328]: DEBUG oslo_vmware.api [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 1134.695890] env[69328]: value = "task-3274142" [ 1134.695890] env[69328]: _type = "Task" [ 1134.695890] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.704731] env[69328]: DEBUG oslo_vmware.api [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274142, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.961703] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274140, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.987066] env[69328]: DEBUG oslo_vmware.api [None req-b4f3a55d-4bfd-43f9-b253-d4bc8c6001e6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3274141, 'name': PowerOffVM_Task, 'duration_secs': 0.388099} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.987066] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4f3a55d-4bfd-43f9-b253-d4bc8c6001e6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1134.987066] env[69328]: DEBUG nova.compute.manager [None req-b4f3a55d-4bfd-43f9-b253-d4bc8c6001e6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1134.987662] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1595d501-b6e3-4183-9f63-650d2f90f366 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.004437] env[69328]: DEBUG nova.compute.utils [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1135.007868] env[69328]: DEBUG nova.compute.manager [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1135.007868] env[69328]: DEBUG nova.network.neutron [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1135.062107] env[69328]: DEBUG nova.policy [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '929ab12fcdb943a48039c7508e6a0b35', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '088bc9e3aeb449baa0a522342d57d183', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 1135.113209] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e1aaf386-3d52-4570-8319-d673b85301a5 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "33583ef3-252c-45d4-a514-5646f98c5f45" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.438s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1135.206018] env[69328]: DEBUG oslo_vmware.api [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': 
task-3274142, 'name': ReconfigVM_Task, 'duration_secs': 0.414791} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.206379] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Reconfigured VM instance instance-00000071 to attach disk [datastore1] ae46c18e-15ae-4a47-b05a-a143f10b5ab6/ae46c18e-15ae-4a47-b05a-a143f10b5ab6.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1135.206999] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aec3b111-e1ad-477c-b10b-b7026e423303 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.214102] env[69328]: DEBUG oslo_vmware.api [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 1135.214102] env[69328]: value = "task-3274143" [ 1135.214102] env[69328]: _type = "Task" [ 1135.214102] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.223351] env[69328]: DEBUG oslo_vmware.api [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274143, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.395879] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7f6f975-4e67-421d-b053-64997fc30d91 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.416631] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Updating instance 'ee3609ea-0855-47c2-874c-349c80419781' progress to 0 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1135.424272] env[69328]: DEBUG nova.network.neutron [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Successfully created port: ecf2d696-3969-4c5e-ac8c-0578b4981440 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1135.462169] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274140, 'name': CreateVM_Task, 'duration_secs': 0.562041} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.463200] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1135.463738] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1135.463922] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1135.464285] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1135.464789] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec5c0794-274e-47d2-9e68-ecb69503ef5b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.470323] env[69328]: DEBUG oslo_vmware.api [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Waiting for the task: (returnval){ [ 1135.470323] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]526ae531-a0d0-0cc7-e533-4af44a50165f" [ 1135.470323] env[69328]: _type = "Task" [ 1135.470323] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.478574] env[69328]: DEBUG oslo_vmware.api [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]526ae531-a0d0-0cc7-e533-4af44a50165f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.499658] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b4f3a55d-4bfd-43f9-b253-d4bc8c6001e6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "32b9acbc-35a0-4d67-ac74-ef46c45fa0b9" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.053s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1135.508323] env[69328]: DEBUG nova.compute.manager [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Start building block device mappings for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1135.560183] env[69328]: DEBUG nova.network.neutron [req-34491607-98f5-4f21-b66b-ba5dea82620a req-90077c8a-4869-43ad-9a16-0de1ceeca23d service nova] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Updated VIF entry in instance network info cache for port c23855e3-70d4-4725-b057-d2d1f6f1d80e. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1135.560543] env[69328]: DEBUG nova.network.neutron [req-34491607-98f5-4f21-b66b-ba5dea82620a req-90077c8a-4869-43ad-9a16-0de1ceeca23d service nova] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Updating instance_info_cache with network_info: [{"id": "c23855e3-70d4-4725-b057-d2d1f6f1d80e", "address": "fa:16:3e:3d:8d:aa", "network": {"id": "28fe61fa-3278-4402-b132-c0f76c8b360b", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-2079882223-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "966fcf0bab1e4c4fbcd9055a118263aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61a172ee-af3f-473e-b12a-3fee5bf39c8d", "external-id": "nsx-vlan-transportzone-997", "segmentation_id": 997, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc23855e3-70", "ovs_interfaceid": "c23855e3-70d4-4725-b057-d2d1f6f1d80e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1135.732701] env[69328]: DEBUG oslo_vmware.api [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274143, 'name': Rename_Task, 'duration_secs': 0.15499} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.732904] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1135.733058] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-35c16710-0734-49d4-a97f-8b25d3962f3e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.741880] env[69328]: DEBUG oslo_vmware.api [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 1135.741880] env[69328]: value = "task-3274144" [ 1135.741880] env[69328]: _type = "Task" [ 1135.741880] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.753389] env[69328]: DEBUG oslo_vmware.api [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274144, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.817712] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98b4c17e-77c8-410f-a63f-d02cf737e2fd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.824915] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b210b285-09b4-4637-8fe1-bf415b21c215 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.859754] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-432226d3-722b-4d1e-bcda-a260a7f8b7de {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.868668] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22e7e668-52a7-4892-912c-56ab664b4466 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.882870] env[69328]: DEBUG nova.compute.provider_tree [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1135.924182] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1135.924713] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-70aa2c05-ac9b-4db1-a57f-da7f35679acd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.932470] env[69328]: DEBUG oslo_vmware.api [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 1135.932470] env[69328]: value = "task-3274145" [ 1135.932470] env[69328]: _type = "Task" [ 1135.932470] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.942145] env[69328]: DEBUG oslo_vmware.api [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274145, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.985023] env[69328]: DEBUG oslo_vmware.api [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]526ae531-a0d0-0cc7-e533-4af44a50165f, 'name': SearchDatastore_Task, 'duration_secs': 0.01079} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.985023] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1135.985023] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1135.985284] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1135.985652] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1135.985652] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1135.985982] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-69a87c1d-e648-44b3-9d6d-d850f88b13f3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.996364] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1135.996556] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1135.997398] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a03ba3a-eac0-44cd-976f-801c5729a373 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.004901] env[69328]: DEBUG oslo_vmware.api [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Waiting for the task: (returnval){ [ 1136.004901] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]520e4ab5-cae0-b641-b551-499ab9450c40" [ 1136.004901] env[69328]: _type = "Task" [ 1136.004901] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.023382] env[69328]: DEBUG oslo_vmware.api [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]520e4ab5-cae0-b641-b551-499ab9450c40, 'name': SearchDatastore_Task, 'duration_secs': 0.010679} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.024616] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d35cbf8b-c795-4e71-964f-b834cabc1337 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.032872] env[69328]: DEBUG oslo_vmware.api [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Waiting for the task: (returnval){ [ 1136.032872] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d40101-010b-5437-a136-c36e37a0dcd7" [ 1136.032872] env[69328]: _type = "Task" [ 1136.032872] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.043107] env[69328]: DEBUG oslo_vmware.api [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d40101-010b-5437-a136-c36e37a0dcd7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.062964] env[69328]: DEBUG oslo_concurrency.lockutils [req-34491607-98f5-4f21-b66b-ba5dea82620a req-90077c8a-4869-43ad-9a16-0de1ceeca23d service nova] Releasing lock "refresh_cache-79d66d5d-e1a4-4bc0-8e43-db97153867e3" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1136.107466] env[69328]: DEBUG nova.compute.manager [req-01c0f330-fffd-4d4c-9c7b-8aa47afbb75c req-260d7db6-eb9e-4f89-a11e-0d0f19320650 service nova] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Received event network-changed-13436ecc-0cb3-4c13-bf18-f81195196ffd {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1136.107623] env[69328]: DEBUG nova.compute.manager [req-01c0f330-fffd-4d4c-9c7b-8aa47afbb75c req-260d7db6-eb9e-4f89-a11e-0d0f19320650 service nova] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Refreshing instance network info cache due to event network-changed-13436ecc-0cb3-4c13-bf18-f81195196ffd. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1136.108032] env[69328]: DEBUG oslo_concurrency.lockutils [req-01c0f330-fffd-4d4c-9c7b-8aa47afbb75c req-260d7db6-eb9e-4f89-a11e-0d0f19320650 service nova] Acquiring lock "refresh_cache-de8e6616-0460-4a6e-918c-a27818da96e2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1136.108286] env[69328]: DEBUG oslo_concurrency.lockutils [req-01c0f330-fffd-4d4c-9c7b-8aa47afbb75c req-260d7db6-eb9e-4f89-a11e-0d0f19320650 service nova] Acquired lock "refresh_cache-de8e6616-0460-4a6e-918c-a27818da96e2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1136.108525] env[69328]: DEBUG nova.network.neutron [req-01c0f330-fffd-4d4c-9c7b-8aa47afbb75c req-260d7db6-eb9e-4f89-a11e-0d0f19320650 service nova] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Refreshing network info cache for port 13436ecc-0cb3-4c13-bf18-f81195196ffd {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1136.203526] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9aaf1844-f000-4e12-b49a-d70f29c67856 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "32b9acbc-35a0-4d67-ac74-ef46c45fa0b9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1136.203526] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9aaf1844-f000-4e12-b49a-d70f29c67856 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "32b9acbc-35a0-4d67-ac74-ef46c45fa0b9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1136.203526] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9aaf1844-f000-4e12-b49a-d70f29c67856 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "32b9acbc-35a0-4d67-ac74-ef46c45fa0b9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1136.203958] env[69328]: DEBUG 
oslo_concurrency.lockutils [None req-9aaf1844-f000-4e12-b49a-d70f29c67856 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "32b9acbc-35a0-4d67-ac74-ef46c45fa0b9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1136.203958] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9aaf1844-f000-4e12-b49a-d70f29c67856 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "32b9acbc-35a0-4d67-ac74-ef46c45fa0b9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1136.206396] env[69328]: INFO nova.compute.manager [None req-9aaf1844-f000-4e12-b49a-d70f29c67856 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Terminating instance [ 1136.252533] env[69328]: DEBUG oslo_vmware.api [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274144, 'name': PowerOnVM_Task, 'duration_secs': 0.510246} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.253716] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1136.253716] env[69328]: INFO nova.compute.manager [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Took 6.92 seconds to spawn the instance on the hypervisor. 
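The Rename_Task / PowerOnVM_Task sequence above shows the submit-and-poll pattern that recurs throughout this log: oslo.vmware issues the vSphere task call, then repeatedly polls the returned task object, logging "progress is N%" until the task reports "completed successfully" together with its duration. The sketch below is only a rough approximation of that loop for illustration; the poll_state callable and the print statements are invented stand-ins and are not the oslo_vmware.api.VMwareAPISession implementation referenced in the log paths.

```python
import time

def wait_for_task(poll_state, task_id, poll_interval=0.5):
    """Poll a vCenter-style task until it leaves its running states.

    poll_state(task_id) is assumed to return (state, progress), where state
    is one of 'queued', 'running', 'success' or 'error'. This sketches the
    pattern behind the "progress is N%" / "completed successfully" lines,
    not oslo.vmware's actual code.
    """
    start = time.monotonic()
    while True:
        state, progress = poll_state(task_id)
        if state in ('queued', 'running'):
            print("Task: %s progress is %s%%." % (task_id, progress))
            time.sleep(poll_interval)
            continue
        duration = time.monotonic() - start
        if state == 'success':
            print("Task: %s completed successfully. duration_secs=%.3f"
                  % (task_id, duration))
            return
        raise RuntimeError("Task %s failed after %.3fs" % (task_id, duration))

# Example: a fake task that finishes on the second poll.
states = iter([('running', 0), ('success', 100)])
wait_for_task(lambda task_id: next(states), 'task-3274144')
```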
[ 1136.253716] env[69328]: DEBUG nova.compute.manager [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1136.254019] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7adfe17-6d5e-440b-a9da-2a550cbb8a11 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.388652] env[69328]: DEBUG nova.scheduler.client.report [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1136.446459] env[69328]: DEBUG oslo_vmware.api [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274145, 'name': PowerOffVM_Task, 'duration_secs': 0.237904} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.446746] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1136.446940] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Updating instance 'ee3609ea-0855-47c2-874c-349c80419781' progress to 17 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1136.524781] env[69328]: DEBUG nova.compute.manager [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1136.543985] env[69328]: DEBUG oslo_vmware.api [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d40101-010b-5437-a136-c36e37a0dcd7, 'name': SearchDatastore_Task, 'duration_secs': 0.010733} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.546193] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1136.546512] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 79d66d5d-e1a4-4bc0-8e43-db97153867e3/79d66d5d-e1a4-4bc0-8e43-db97153867e3.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1136.546978] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f48f540b-8f7f-43e4-94d7-fd80902aee16 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.554033] env[69328]: DEBUG nova.virt.hardware [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1136.554033] env[69328]: DEBUG nova.virt.hardware [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1136.554282] env[69328]: DEBUG nova.virt.hardware [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1136.554414] env[69328]: DEBUG nova.virt.hardware [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1136.554560] env[69328]: DEBUG nova.virt.hardware [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 
tempest-AttachVolumeNegativeTest-1234324410-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1136.554956] env[69328]: DEBUG nova.virt.hardware [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1136.554956] env[69328]: DEBUG nova.virt.hardware [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1136.555122] env[69328]: DEBUG nova.virt.hardware [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1136.555279] env[69328]: DEBUG nova.virt.hardware [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1136.555447] env[69328]: DEBUG nova.virt.hardware [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1136.555621] env[69328]: DEBUG nova.virt.hardware [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1136.556974] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ef66e47-6d35-4a04-a443-fdf4e418867c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.560979] env[69328]: DEBUG oslo_vmware.api [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Waiting for the task: (returnval){ [ 1136.560979] env[69328]: value = "task-3274146" [ 1136.560979] env[69328]: _type = "Task" [ 1136.560979] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.568754] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e1df114-a85d-4b4c-b5d9-4d7553bab535 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.576337] env[69328]: DEBUG oslo_vmware.api [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Task: {'id': task-3274146, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.711157] env[69328]: DEBUG nova.compute.manager [None req-9aaf1844-f000-4e12-b49a-d70f29c67856 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1136.711406] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9aaf1844-f000-4e12-b49a-d70f29c67856 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1136.712521] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dbd5a19-8a70-44e0-993d-643492b1e67b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.721173] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9aaf1844-f000-4e12-b49a-d70f29c67856 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1136.721440] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7de3d3f5-6e86-48d2-a3e8-cfcce19f8208 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.782116] env[69328]: INFO nova.compute.manager [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Took 14.07 seconds to build instance. 
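The "Getting desirable topologies ... Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" block above enumerates every (sockets, cores, threads) split of the flavor's vCPU count that fits the sockets/cores/threads limits; for the 1-vCPU m1.nano flavor only 1:1:1 survives, which is why the log reports exactly one possible topology. The following is a simplified sketch of that enumeration, assuming nothing beyond what the log shows: Topology is an invented stand-in for nova's VirtCPUTopology and the function is not Nova's actual nova.virt.hardware code.

```python
import itertools
from collections import namedtuple

# Invented stand-in for nova.objects.VirtCPUTopology, for illustration only.
Topology = namedtuple('Topology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Enumerate (sockets, cores, threads) splits whose product equals vcpus.

    Mirrors the idea behind the "Build topologies for N vcpu(s)" /
    "Got N possible topologies" DEBUG lines, not Nova's exact algorithm.
    """
    found = []
    for sockets, cores, threads in itertools.product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            found.append(Topology(sockets, cores, threads))
    return found

# For the 1-vCPU flavor in the log this yields exactly one topology,
# matching "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]".
print(possible_topologies(1))  # [Topology(sockets=1, cores=1, threads=1)]
```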
[ 1136.794208] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9aaf1844-f000-4e12-b49a-d70f29c67856 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1136.794525] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9aaf1844-f000-4e12-b49a-d70f29c67856 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1136.794756] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-9aaf1844-f000-4e12-b49a-d70f29c67856 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Deleting the datastore file [datastore1] 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1136.795070] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5de23074-0481-4e8a-a44f-66b77522184b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.805248] env[69328]: DEBUG oslo_vmware.api [None req-9aaf1844-f000-4e12-b49a-d70f29c67856 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1136.805248] env[69328]: value = "task-3274148" [ 1136.805248] env[69328]: _type = "Task" [ 1136.805248] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.818895] env[69328]: DEBUG oslo_vmware.api [None req-9aaf1844-f000-4e12-b49a-d70f29c67856 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3274148, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.898343] env[69328]: DEBUG oslo_concurrency.lockutils [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.402s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1136.898926] env[69328]: DEBUG nova.compute.manager [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1136.902950] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 3.766s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1136.903237] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1136.903451] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69328) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1136.904885] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08b81ab5-27c8-4dc4-9de2-e0a3df3b4cff {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.918042] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f409b7a-f3cd-45f4-858f-15461a21e9d8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.941427] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e3b7035-2a1a-4b76-a34c-646c4ef7e234 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.951996] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b87de8e-05bb-4d0f-baca-6a6a5c444a30 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.962343] env[69328]: DEBUG nova.virt.hardware [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:34:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1136.962343] env[69328]: DEBUG nova.virt.hardware [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1136.962343] env[69328]: DEBUG nova.virt.hardware [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 
tempest-ServerActionsTestOtherA-469847991-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1136.962343] env[69328]: DEBUG nova.virt.hardware [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1136.962343] env[69328]: DEBUG nova.virt.hardware [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1136.962343] env[69328]: DEBUG nova.virt.hardware [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1136.962343] env[69328]: DEBUG nova.virt.hardware [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1136.962343] env[69328]: DEBUG nova.virt.hardware [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1136.962343] env[69328]: DEBUG nova.virt.hardware [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1136.962719] env[69328]: DEBUG nova.virt.hardware [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1136.962940] env[69328]: DEBUG nova.virt.hardware [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1136.972401] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1beb7b2a-889e-4759-b3f6-ed344179feef {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.017588] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178548MB free_disk=115GB free_vcpus=48 pci_devices=None {{(pid=69328) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 
1137.017588] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1137.017588] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1137.021115] env[69328]: DEBUG oslo_vmware.api [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 1137.021115] env[69328]: value = "task-3274149" [ 1137.021115] env[69328]: _type = "Task" [ 1137.021115] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.031721] env[69328]: DEBUG oslo_vmware.api [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274149, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.075332] env[69328]: DEBUG oslo_vmware.api [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Task: {'id': task-3274146, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.115443] env[69328]: DEBUG nova.compute.manager [req-83a60cc9-88b4-41ee-a3e6-79a86b0157a6 req-65a26ec4-57f5-4109-8bce-c862a2d9614a service nova] [instance: ff815ffb-3422-469e-9b54-b33502826513] Received event network-vif-plugged-ecf2d696-3969-4c5e-ac8c-0578b4981440 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1137.115652] env[69328]: DEBUG oslo_concurrency.lockutils [req-83a60cc9-88b4-41ee-a3e6-79a86b0157a6 req-65a26ec4-57f5-4109-8bce-c862a2d9614a service nova] Acquiring lock "ff815ffb-3422-469e-9b54-b33502826513-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1137.115897] env[69328]: DEBUG oslo_concurrency.lockutils [req-83a60cc9-88b4-41ee-a3e6-79a86b0157a6 req-65a26ec4-57f5-4109-8bce-c862a2d9614a service nova] Lock "ff815ffb-3422-469e-9b54-b33502826513-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1137.116082] env[69328]: DEBUG oslo_concurrency.lockutils [req-83a60cc9-88b4-41ee-a3e6-79a86b0157a6 req-65a26ec4-57f5-4109-8bce-c862a2d9614a service nova] Lock "ff815ffb-3422-469e-9b54-b33502826513-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1137.116308] env[69328]: DEBUG nova.compute.manager [req-83a60cc9-88b4-41ee-a3e6-79a86b0157a6 req-65a26ec4-57f5-4109-8bce-c862a2d9614a service nova] [instance: ff815ffb-3422-469e-9b54-b33502826513] No waiting events found dispatching network-vif-plugged-ecf2d696-3969-4c5e-ac8c-0578b4981440 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1137.116481] env[69328]: WARNING nova.compute.manager [req-83a60cc9-88b4-41ee-a3e6-79a86b0157a6 req-65a26ec4-57f5-4109-8bce-c862a2d9614a service nova] [instance: ff815ffb-3422-469e-9b54-b33502826513] Received unexpected event network-vif-plugged-ecf2d696-3969-4c5e-ac8c-0578b4981440 for instance with vm_state building and task_state spawning. 
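The req-83a60cc9 entries above show Neutron's network-vif-plugged notification for port ecf2d696-3969-4c5e-ac8c-0578b4981440 arriving before the compute manager has registered a waiter for it, so Nova logs "No waiting events found dispatching ..." followed by the WARNING about an unexpected event while the instance is still in vm_state building / task_state spawning. Below is a toy sketch of that prepare-then-dispatch bookkeeping; the class and method names are simplified stand-ins invented for illustration (the real code is nova.compute.manager.InstanceEvents, whose pop_instance_event and clear_events_for_instance lock names appear in the log).

```python
import threading

class ToyInstanceEvents:
    """Simplified stand-in for the expected-event bookkeeping (illustration only)."""

    def __init__(self):
        self._lock = threading.Lock()
        self._waiting = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare(self, instance_uuid, event_name):
        # Registered before the operation that will trigger the event
        # (e.g. plugging a VIF); the caller later wait()s on the result.
        ev = threading.Event()
        with self._lock:
            self._waiting[(instance_uuid, event_name)] = ev
        return ev

    def dispatch(self, instance_uuid, event_name):
        # Called when the external instance event (Neutron's notification)
        # arrives at the compute manager.
        with self._lock:
            ev = self._waiting.pop((instance_uuid, event_name), None)
        if ev is None:
            # No one registered yet: the "No waiting events found
            # dispatching ..." path plus the WARNING about an unexpected event.
            print("WARNING: unexpected event %s for %s" % (event_name, instance_uuid))
            return False
        ev.set()
        return True

events = ToyInstanceEvents()
# The notification beats the registration, as in the log above.
events.dispatch('ff815ffb-3422-469e-9b54-b33502826513',
                'network-vif-plugged-ecf2d696-3969-4c5e-ac8c-0578b4981440')
```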
[ 1137.196054] env[69328]: DEBUG nova.network.neutron [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Successfully updated port: ecf2d696-3969-4c5e-ac8c-0578b4981440 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1137.289785] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9a8db9f7-0432-432e-8e4f-05cf2565a8b1 tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lock "ae46c18e-15ae-4a47-b05a-a143f10b5ab6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.582s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1137.322896] env[69328]: DEBUG oslo_vmware.api [None req-9aaf1844-f000-4e12-b49a-d70f29c67856 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3274148, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.389123} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.323217] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-9aaf1844-f000-4e12-b49a-d70f29c67856 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1137.323412] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9aaf1844-f000-4e12-b49a-d70f29c67856 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1137.323590] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9aaf1844-f000-4e12-b49a-d70f29c67856 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1137.323770] env[69328]: INFO nova.compute.manager [None req-9aaf1844-f000-4e12-b49a-d70f29c67856 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Took 0.61 seconds to destroy the instance on the hypervisor. [ 1137.324027] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9aaf1844-f000-4e12-b49a-d70f29c67856 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1137.324340] env[69328]: DEBUG nova.compute.manager [-] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1137.324340] env[69328]: DEBUG nova.network.neutron [-] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1137.405720] env[69328]: DEBUG nova.compute.utils [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1137.407140] env[69328]: DEBUG nova.compute.manager [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1137.407300] env[69328]: DEBUG nova.network.neutron [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1137.426575] env[69328]: DEBUG nova.network.neutron [req-01c0f330-fffd-4d4c-9c7b-8aa47afbb75c req-260d7db6-eb9e-4f89-a11e-0d0f19320650 service nova] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Updated VIF entry in instance network info cache for port 13436ecc-0cb3-4c13-bf18-f81195196ffd. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1137.426921] env[69328]: DEBUG nova.network.neutron [req-01c0f330-fffd-4d4c-9c7b-8aa47afbb75c req-260d7db6-eb9e-4f89-a11e-0d0f19320650 service nova] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Updating instance_info_cache with network_info: [{"id": "13436ecc-0cb3-4c13-bf18-f81195196ffd", "address": "fa:16:3e:2e:1b:14", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13436ecc-0c", "ovs_interfaceid": "13436ecc-0cb3-4c13-bf18-f81195196ffd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1137.517096] env[69328]: DEBUG nova.policy [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '781cfc30588942789f4e7cda072b2f68', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0e9e98f83e974a32b0db6ce5e8442012', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 1137.546523] env[69328]: DEBUG oslo_vmware.api [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274149, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.573336] env[69328]: DEBUG oslo_vmware.api [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Task: {'id': task-3274146, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.536096} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.573608] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 79d66d5d-e1a4-4bc0-8e43-db97153867e3/79d66d5d-e1a4-4bc0-8e43-db97153867e3.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1137.573819] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1137.574122] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-735824a2-67c7-4498-91cc-a6e4ec502654 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.585596] env[69328]: DEBUG oslo_vmware.api [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Waiting for the task: (returnval){ [ 1137.585596] env[69328]: value = "task-3274150" [ 1137.585596] env[69328]: _type = "Task" [ 1137.585596] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.597185] env[69328]: DEBUG oslo_vmware.api [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Task: {'id': task-3274150, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.702844] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "refresh_cache-ff815ffb-3422-469e-9b54-b33502826513" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1137.703024] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquired lock "refresh_cache-ff815ffb-3422-469e-9b54-b33502826513" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1137.703188] env[69328]: DEBUG nova.network.neutron [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1137.910722] env[69328]: DEBUG nova.compute.manager [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Start building block device mappings for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1137.929351] env[69328]: DEBUG oslo_concurrency.lockutils [req-01c0f330-fffd-4d4c-9c7b-8aa47afbb75c req-260d7db6-eb9e-4f89-a11e-0d0f19320650 service nova] Releasing lock "refresh_cache-de8e6616-0460-4a6e-918c-a27818da96e2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1138.039947] env[69328]: DEBUG nova.network.neutron [-] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1138.041636] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Applying migration context for instance ee3609ea-0855-47c2-874c-349c80419781 as it has an incoming, in-progress migration c9d84624-27a9-4e50-abe2-112cbefbaf04. Migration status is migrating {{(pid=69328) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1138.043812] env[69328]: INFO nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: ee3609ea-0855-47c2-874c-349c80419781] Updating resource usage from migration c9d84624-27a9-4e50-abe2-112cbefbaf04 [ 1138.050043] env[69328]: DEBUG oslo_vmware.api [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274149, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.070947] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance b0a1441c-81e2-4131-a2ff-f5042d559d9f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1138.071117] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance a0b663eb-31b0-4de1-94bc-660a7d9c1c7b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1138.071259] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance f1be93b2-08db-41fe-87c4-f4e5f964cfa4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1138.071368] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance c751ef77-c3be-46cd-b7eb-fe139bf0998b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1138.071485] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 1413dcfe-3570-4657-b811-81a1acc159d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1138.071600] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 52c87371-4142-40d6-ac68-804aabd9f823 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1138.071712] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1138.071822] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 275ef1ed-8e60-4151-b548-e22e5bd8efe2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1138.071930] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance c1829dcf-3608-4955-bd50-eb9ee27d38e1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1138.072050] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance fb2d04d8-cff6-414c-9d50-3ab61729546d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1138.072191] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance de8e6616-0460-4a6e-918c-a27818da96e2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1138.072306] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1138.072415] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1138.072662] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 33583ef3-252c-45d4-a514-5646f98c5f45 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1138.072662] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance ae46c18e-15ae-4a47-b05a-a143f10b5ab6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1138.072737] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 79d66d5d-e1a4-4bc0-8e43-db97153867e3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1138.072829] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Migration c9d84624-27a9-4e50-abe2-112cbefbaf04 is active on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1138.072963] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance ee3609ea-0855-47c2-874c-349c80419781 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1138.073056] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance ff815ffb-3422-469e-9b54-b33502826513 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1138.073165] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 03f0adc8-d640-4248-be9d-ab4ba0cbe760 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1138.073516] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Total usable vcpus: 48, total allocated vcpus: 20 {{(pid=69328) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1138.073672] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4416MB phys_disk=200GB used_disk=17GB total_vcpus=48 used_vcpus=20 pci_stats=[] {{(pid=69328) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1138.095945] env[69328]: DEBUG oslo_vmware.api [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Task: {'id': task-3274150, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.156862} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.098657] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1138.101422] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccf7d5fa-dfda-4539-9579-41ad05139387 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.132572] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Reconfiguring VM instance instance-00000072 to attach disk [datastore2] 79d66d5d-e1a4-4bc0-8e43-db97153867e3/79d66d5d-e1a4-4bc0-8e43-db97153867e3.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1138.136766] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-627e74aa-95d7-4c05-86f5-0c11c1dcaa3e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.155943] env[69328]: DEBUG nova.compute.manager [req-91eb1ad5-f5ae-4c23-8968-706d5782bc1d req-82922bde-9ac2-4e4a-b814-1a9a6ea8ac09 service nova] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Received event network-changed-509b2377-84e7-48a6-b2ed-811f288cc65c {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1138.155943] env[69328]: DEBUG nova.compute.manager [req-91eb1ad5-f5ae-4c23-8968-706d5782bc1d req-82922bde-9ac2-4e4a-b814-1a9a6ea8ac09 service nova] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Refreshing instance network info cache due to event network-changed-509b2377-84e7-48a6-b2ed-811f288cc65c. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1138.155943] env[69328]: DEBUG oslo_concurrency.lockutils [req-91eb1ad5-f5ae-4c23-8968-706d5782bc1d req-82922bde-9ac2-4e4a-b814-1a9a6ea8ac09 service nova] Acquiring lock "refresh_cache-33583ef3-252c-45d4-a514-5646f98c5f45" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1138.155943] env[69328]: DEBUG oslo_concurrency.lockutils [req-91eb1ad5-f5ae-4c23-8968-706d5782bc1d req-82922bde-9ac2-4e4a-b814-1a9a6ea8ac09 service nova] Acquired lock "refresh_cache-33583ef3-252c-45d4-a514-5646f98c5f45" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1138.155943] env[69328]: DEBUG nova.network.neutron [req-91eb1ad5-f5ae-4c23-8968-706d5782bc1d req-82922bde-9ac2-4e4a-b814-1a9a6ea8ac09 service nova] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Refreshing network info cache for port 509b2377-84e7-48a6-b2ed-811f288cc65c {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1138.163863] env[69328]: DEBUG oslo_vmware.api [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Waiting for the task: (returnval){ [ 1138.163863] env[69328]: value = "task-3274151" [ 1138.163863] env[69328]: _type = "Task" [ 1138.163863] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.179068] env[69328]: DEBUG oslo_vmware.api [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Task: {'id': task-3274151, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.244712] env[69328]: DEBUG nova.network.neutron [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1138.307182] env[69328]: DEBUG nova.network.neutron [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Successfully created port: 372b585b-44c4-4862-adf0-76a6931af0cd {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1138.450054] env[69328]: DEBUG nova.network.neutron [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Updating instance_info_cache with network_info: [{"id": "ecf2d696-3969-4c5e-ac8c-0578b4981440", "address": "fa:16:3e:15:2d:17", "network": {"id": "2e8bdd1b-0cca-4f7c-bba3-ff1750073020", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2058593014-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "088bc9e3aeb449baa0a522342d57d183", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapecf2d696-39", "ovs_interfaceid": "ecf2d696-3969-4c5e-ac8c-0578b4981440", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1138.463066] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-270ac4ad-0d33-4aa6-a276-06628c7fc88f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.473281] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d034e43-673d-4521-89ec-f62aaeb0a81a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.504929] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f21b1680-6652-419c-9f8e-9893149395b6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.513292] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4998052-a51a-4a9f-bf6b-59a01e39839a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.529013] env[69328]: DEBUG nova.compute.provider_tree [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1138.549158] env[69328]: INFO nova.compute.manager [-] [instance: 
32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Took 1.22 seconds to deallocate network for instance. [ 1138.549351] env[69328]: DEBUG oslo_vmware.api [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274149, 'name': ReconfigVM_Task, 'duration_secs': 1.292362} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.551100] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Updating instance 'ee3609ea-0855-47c2-874c-349c80419781' progress to 33 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1138.674033] env[69328]: DEBUG oslo_vmware.api [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Task: {'id': task-3274151, 'name': ReconfigVM_Task, 'duration_secs': 0.480006} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.674415] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Reconfigured VM instance instance-00000072 to attach disk [datastore2] 79d66d5d-e1a4-4bc0-8e43-db97153867e3/79d66d5d-e1a4-4bc0-8e43-db97153867e3.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1138.675080] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-57a402bb-5e3c-462b-bece-db4054822089 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.684512] env[69328]: DEBUG oslo_vmware.api [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Waiting for the task: (returnval){ [ 1138.684512] env[69328]: value = "task-3274152" [ 1138.684512] env[69328]: _type = "Task" [ 1138.684512] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.693489] env[69328]: DEBUG oslo_vmware.api [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Task: {'id': task-3274152, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.916877] env[69328]: DEBUG nova.network.neutron [req-91eb1ad5-f5ae-4c23-8968-706d5782bc1d req-82922bde-9ac2-4e4a-b814-1a9a6ea8ac09 service nova] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Updated VIF entry in instance network info cache for port 509b2377-84e7-48a6-b2ed-811f288cc65c. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1138.917263] env[69328]: DEBUG nova.network.neutron [req-91eb1ad5-f5ae-4c23-8968-706d5782bc1d req-82922bde-9ac2-4e4a-b814-1a9a6ea8ac09 service nova] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Updating instance_info_cache with network_info: [{"id": "509b2377-84e7-48a6-b2ed-811f288cc65c", "address": "fa:16:3e:f8:6d:5c", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap509b2377-84", "ovs_interfaceid": "509b2377-84e7-48a6-b2ed-811f288cc65c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1138.923954] env[69328]: DEBUG nova.compute.manager [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1138.951775] env[69328]: DEBUG nova.virt.hardware [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1138.952093] env[69328]: DEBUG nova.virt.hardware [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1138.952287] env[69328]: DEBUG nova.virt.hardware [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1138.952463] env[69328]: DEBUG nova.virt.hardware [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1138.952607] env[69328]: DEBUG nova.virt.hardware [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1138.952750] env[69328]: DEBUG nova.virt.hardware [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1138.952979] env[69328]: DEBUG nova.virt.hardware [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1138.953156] env[69328]: DEBUG nova.virt.hardware [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1138.953349] 
env[69328]: DEBUG nova.virt.hardware [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1138.953519] env[69328]: DEBUG nova.virt.hardware [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1138.953709] env[69328]: DEBUG nova.virt.hardware [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1138.954235] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Releasing lock "refresh_cache-ff815ffb-3422-469e-9b54-b33502826513" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1138.954530] env[69328]: DEBUG nova.compute.manager [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Instance network_info: |[{"id": "ecf2d696-3969-4c5e-ac8c-0578b4981440", "address": "fa:16:3e:15:2d:17", "network": {"id": "2e8bdd1b-0cca-4f7c-bba3-ff1750073020", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2058593014-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "088bc9e3aeb449baa0a522342d57d183", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapecf2d696-39", "ovs_interfaceid": "ecf2d696-3969-4c5e-ac8c-0578b4981440", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1138.955403] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91c94a9f-3a85-41ac-88b6-295d180e8a86 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.958646] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Instance VIF info [{'network_name': 'br-int', 'mac_address': 
'fa:16:3e:15:2d:17', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '43ad01d2-c7dd-453c-a929-8ad76294d13c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ecf2d696-3969-4c5e-ac8c-0578b4981440', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1138.966023] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1138.966529] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff815ffb-3422-469e-9b54-b33502826513] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1138.967195] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f5c6159b-3904-49f9-a3af-3a0e9b2f3503 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.985111] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daad72d6-3a80-46fe-9a97-4ebd0793a329 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.989959] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1138.989959] env[69328]: value = "task-3274153" [ 1138.989959] env[69328]: _type = "Task" [ 1138.989959] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.999193] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274153, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.032096] env[69328]: DEBUG nova.scheduler.client.report [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1139.059701] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9aaf1844-f000-4e12-b49a-d70f29c67856 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1139.061757] env[69328]: DEBUG nova.virt.hardware [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1139.062009] env[69328]: DEBUG nova.virt.hardware [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1139.062221] env[69328]: DEBUG nova.virt.hardware [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1139.062634] env[69328]: DEBUG nova.virt.hardware [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1139.062634] env[69328]: DEBUG nova.virt.hardware [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1139.062740] env[69328]: DEBUG nova.virt.hardware [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1139.062946] env[69328]: DEBUG nova.virt.hardware [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1139.063127] env[69328]: DEBUG nova.virt.hardware [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1139.063323] env[69328]: DEBUG nova.virt.hardware [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1139.063483] env[69328]: DEBUG nova.virt.hardware [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1139.063665] env[69328]: DEBUG nova.virt.hardware [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1139.070290] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Reconfiguring VM instance instance-0000006e to detach disk 2000 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1139.071055] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-edbe2412-b7ca-4f5d-baaf-12f033e23ca2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.095968] env[69328]: DEBUG oslo_vmware.api [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 1139.095968] env[69328]: value = "task-3274154" [ 1139.095968] env[69328]: _type = "Task" [ 1139.095968] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.105444] env[69328]: DEBUG oslo_vmware.api [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274154, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.149287] env[69328]: DEBUG nova.compute.manager [req-ef3fe774-f901-4b00-8569-bcdd647208c1 req-cead163e-b959-47cf-accb-6becaee67e1b service nova] [instance: ff815ffb-3422-469e-9b54-b33502826513] Received event network-changed-ecf2d696-3969-4c5e-ac8c-0578b4981440 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1139.149613] env[69328]: DEBUG nova.compute.manager [req-ef3fe774-f901-4b00-8569-bcdd647208c1 req-cead163e-b959-47cf-accb-6becaee67e1b service nova] [instance: ff815ffb-3422-469e-9b54-b33502826513] Refreshing instance network info cache due to event network-changed-ecf2d696-3969-4c5e-ac8c-0578b4981440. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1139.149934] env[69328]: DEBUG oslo_concurrency.lockutils [req-ef3fe774-f901-4b00-8569-bcdd647208c1 req-cead163e-b959-47cf-accb-6becaee67e1b service nova] Acquiring lock "refresh_cache-ff815ffb-3422-469e-9b54-b33502826513" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1139.150172] env[69328]: DEBUG oslo_concurrency.lockutils [req-ef3fe774-f901-4b00-8569-bcdd647208c1 req-cead163e-b959-47cf-accb-6becaee67e1b service nova] Acquired lock "refresh_cache-ff815ffb-3422-469e-9b54-b33502826513" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1139.150433] env[69328]: DEBUG nova.network.neutron [req-ef3fe774-f901-4b00-8569-bcdd647208c1 req-cead163e-b959-47cf-accb-6becaee67e1b service nova] [instance: ff815ffb-3422-469e-9b54-b33502826513] Refreshing network info cache for port ecf2d696-3969-4c5e-ac8c-0578b4981440 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1139.195915] env[69328]: DEBUG oslo_vmware.api [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Task: {'id': task-3274152, 'name': Rename_Task, 'duration_secs': 0.156607} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.196232] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1139.196517] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f2652836-e003-4295-a3cb-df857ee56872 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.205511] env[69328]: DEBUG oslo_vmware.api [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Waiting for the task: (returnval){ [ 1139.205511] env[69328]: value = "task-3274155" [ 1139.205511] env[69328]: _type = "Task" [ 1139.205511] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.214743] env[69328]: DEBUG oslo_vmware.api [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Task: {'id': task-3274155, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.420348] env[69328]: DEBUG oslo_concurrency.lockutils [req-91eb1ad5-f5ae-4c23-8968-706d5782bc1d req-82922bde-9ac2-4e4a-b814-1a9a6ea8ac09 service nova] Releasing lock "refresh_cache-33583ef3-252c-45d4-a514-5646f98c5f45" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1139.503899] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274153, 'name': CreateVM_Task, 'duration_secs': 0.361878} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.503899] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff815ffb-3422-469e-9b54-b33502826513] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1139.504547] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1139.504817] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1139.505027] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1139.505310] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee18362b-215f-4309-97e3-9fa94784a745 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.510732] env[69328]: DEBUG oslo_vmware.api [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 1139.510732] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d0bcc0-5c26-98a5-2ff8-4204538666a5" [ 1139.510732] env[69328]: _type = "Task" [ 1139.510732] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.519676] env[69328]: DEBUG oslo_vmware.api [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d0bcc0-5c26-98a5-2ff8-4204538666a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.538021] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69328) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1139.538021] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.520s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1139.538161] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9aaf1844-f000-4e12-b49a-d70f29c67856 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.479s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1139.538330] env[69328]: DEBUG nova.objects.instance [None req-9aaf1844-f000-4e12-b49a-d70f29c67856 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lazy-loading 'resources' on Instance uuid 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1139.539407] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1139.539559] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Cleaning up deleted instances with incomplete migration {{(pid=69328) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11872}} [ 1139.606575] env[69328]: DEBUG oslo_vmware.api [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274154, 'name': ReconfigVM_Task, 'duration_secs': 0.251869} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.606855] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Reconfigured VM instance instance-0000006e to detach disk 2000 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1139.607942] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67c29e45-dc3e-496b-9968-a071c2189280 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.630088] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Reconfiguring VM instance instance-0000006e to attach disk [datastore2] volume-a2de811d-614f-4456-ac21-52535c9e5fd6/volume-a2de811d-614f-4456-ac21-52535c9e5fd6.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1139.630396] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ddef1758-13ef-4766-8c69-6cbbca6fb07d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.659919] env[69328]: DEBUG oslo_vmware.api [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 1139.659919] env[69328]: value = "task-3274156" [ 1139.659919] env[69328]: _type = "Task" [ 1139.659919] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.667665] env[69328]: DEBUG oslo_vmware.api [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274156, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.716627] env[69328]: DEBUG oslo_vmware.api [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Task: {'id': task-3274155, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.851481] env[69328]: DEBUG nova.network.neutron [req-ef3fe774-f901-4b00-8569-bcdd647208c1 req-cead163e-b959-47cf-accb-6becaee67e1b service nova] [instance: ff815ffb-3422-469e-9b54-b33502826513] Updated VIF entry in instance network info cache for port ecf2d696-3969-4c5e-ac8c-0578b4981440. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1139.851830] env[69328]: DEBUG nova.network.neutron [req-ef3fe774-f901-4b00-8569-bcdd647208c1 req-cead163e-b959-47cf-accb-6becaee67e1b service nova] [instance: ff815ffb-3422-469e-9b54-b33502826513] Updating instance_info_cache with network_info: [{"id": "ecf2d696-3969-4c5e-ac8c-0578b4981440", "address": "fa:16:3e:15:2d:17", "network": {"id": "2e8bdd1b-0cca-4f7c-bba3-ff1750073020", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2058593014-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "088bc9e3aeb449baa0a522342d57d183", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapecf2d696-39", "ovs_interfaceid": "ecf2d696-3969-4c5e-ac8c-0578b4981440", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1139.876618] env[69328]: DEBUG nova.network.neutron [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Successfully updated port: 372b585b-44c4-4862-adf0-76a6931af0cd {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1140.021985] env[69328]: DEBUG oslo_vmware.api [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d0bcc0-5c26-98a5-2ff8-4204538666a5, 'name': SearchDatastore_Task, 'duration_secs': 0.011883} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.022337] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1140.022572] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1140.022801] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1140.022945] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1140.023140] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1140.023393] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ca83d487-4db6-4524-afd0-42acb2a06614 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.031835] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1140.032014] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1140.032698] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a968f635-841c-4667-825d-65343e039bb9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.037710] env[69328]: DEBUG oslo_vmware.api [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 1140.037710] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d4fcdd-67f4-406b-467e-6fbba30ac1f6" [ 1140.037710] env[69328]: _type = "Task" [ 1140.037710] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.049916] env[69328]: DEBUG oslo_vmware.api [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d4fcdd-67f4-406b-467e-6fbba30ac1f6, 'name': SearchDatastore_Task, 'duration_secs': 0.009605} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.050689] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5bc425a-b22a-4107-8a1e-1fdb091d71b7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.056636] env[69328]: DEBUG oslo_vmware.api [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 1140.056636] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]522daab6-633e-a147-ad94-f2b8d059c5fd" [ 1140.056636] env[69328]: _type = "Task" [ 1140.056636] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.067442] env[69328]: DEBUG oslo_vmware.api [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]522daab6-633e-a147-ad94-f2b8d059c5fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.169241] env[69328]: DEBUG oslo_vmware.api [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274156, 'name': ReconfigVM_Task, 'duration_secs': 0.459741} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.169527] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Reconfigured VM instance instance-0000006e to attach disk [datastore2] volume-a2de811d-614f-4456-ac21-52535c9e5fd6/volume-a2de811d-614f-4456-ac21-52535c9e5fd6.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1140.170692] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Updating instance 'ee3609ea-0855-47c2-874c-349c80419781' progress to 50 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1140.178059] env[69328]: DEBUG nova.compute.manager [req-d8780ea7-991f-4215-bca0-95ea7045ac6c req-2bdc9b35-d5bf-4aed-a485-ee135f383fd6 service nova] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Received event network-changed-19978029-822a-48e0-b3c1-9d885b82a5f3 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1140.178252] env[69328]: DEBUG nova.compute.manager [req-d8780ea7-991f-4215-bca0-95ea7045ac6c req-2bdc9b35-d5bf-4aed-a485-ee135f383fd6 service nova] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Refreshing instance network info cache due to event network-changed-19978029-822a-48e0-b3c1-9d885b82a5f3. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1140.178462] env[69328]: DEBUG oslo_concurrency.lockutils [req-d8780ea7-991f-4215-bca0-95ea7045ac6c req-2bdc9b35-d5bf-4aed-a485-ee135f383fd6 service nova] Acquiring lock "refresh_cache-ae46c18e-15ae-4a47-b05a-a143f10b5ab6" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1140.178602] env[69328]: DEBUG oslo_concurrency.lockutils [req-d8780ea7-991f-4215-bca0-95ea7045ac6c req-2bdc9b35-d5bf-4aed-a485-ee135f383fd6 service nova] Acquired lock "refresh_cache-ae46c18e-15ae-4a47-b05a-a143f10b5ab6" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1140.178760] env[69328]: DEBUG nova.network.neutron [req-d8780ea7-991f-4215-bca0-95ea7045ac6c req-2bdc9b35-d5bf-4aed-a485-ee135f383fd6 service nova] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Refreshing network info cache for port 19978029-822a-48e0-b3c1-9d885b82a5f3 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1140.215780] env[69328]: DEBUG oslo_vmware.api [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Task: {'id': task-3274155, 'name': PowerOnVM_Task, 'duration_secs': 0.792469} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.218768] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1140.218975] env[69328]: INFO nova.compute.manager [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Took 7.98 seconds to spawn the instance on the hypervisor. [ 1140.219167] env[69328]: DEBUG nova.compute.manager [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1140.220175] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e931f73b-3d73-4c83-838e-8891b4c61640 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.303132] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a63942ae-6c10-4dfc-a07f-9420efa3e783 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.311288] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1444262-4b52-446f-88cd-70648474745f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.341859] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-369c8ce6-6329-4f6a-93a8-64818cb2cc54 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.349747] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc82d92e-6493-400e-ab69-963b6ac65abe {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.353934] env[69328]: DEBUG oslo_concurrency.lockutils [req-ef3fe774-f901-4b00-8569-bcdd647208c1 req-cead163e-b959-47cf-accb-6becaee67e1b service nova] Releasing lock "refresh_cache-ff815ffb-3422-469e-9b54-b33502826513" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1140.354221] env[69328]: DEBUG nova.compute.manager [req-ef3fe774-f901-4b00-8569-bcdd647208c1 req-cead163e-b959-47cf-accb-6becaee67e1b service nova] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Received event network-vif-deleted-8ca9303b-2679-4187-add6-38fd1acef103 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1140.362642] env[69328]: DEBUG nova.compute.provider_tree [None req-9aaf1844-f000-4e12-b49a-d70f29c67856 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1140.378516] env[69328]: DEBUG 
oslo_concurrency.lockutils [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Acquiring lock "refresh_cache-03f0adc8-d640-4248-be9d-ab4ba0cbe760" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1140.378658] env[69328]: DEBUG oslo_concurrency.lockutils [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Acquired lock "refresh_cache-03f0adc8-d640-4248-be9d-ab4ba0cbe760" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1140.378805] env[69328]: DEBUG nova.network.neutron [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1140.571663] env[69328]: DEBUG oslo_vmware.api [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]522daab6-633e-a147-ad94-f2b8d059c5fd, 'name': SearchDatastore_Task, 'duration_secs': 0.011801} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.572039] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1140.572387] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] ff815ffb-3422-469e-9b54-b33502826513/ff815ffb-3422-469e-9b54-b33502826513.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1140.572730] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5b463e1c-0ba1-4ac5-8381-4f2d1cbb513e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.580571] env[69328]: DEBUG oslo_vmware.api [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 1140.580571] env[69328]: value = "task-3274157" [ 1140.580571] env[69328]: _type = "Task" [ 1140.580571] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.590049] env[69328]: DEBUG oslo_vmware.api [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274157, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.631738] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1140.632076] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1140.676388] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeadbc8b-ec3f-42ba-927c-9a529020970f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.698638] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9f4be41-ec40-4b80-a4cc-ce158ad5ba32 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.719536] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Updating instance 'ee3609ea-0855-47c2-874c-349c80419781' progress to 67 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1140.739697] env[69328]: INFO nova.compute.manager [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Took 15.26 seconds to build instance. [ 1140.865749] env[69328]: DEBUG nova.scheduler.client.report [None req-9aaf1844-f000-4e12-b49a-d70f29c67856 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1140.915104] env[69328]: DEBUG nova.network.neutron [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1140.984510] env[69328]: DEBUG nova.network.neutron [req-d8780ea7-991f-4215-bca0-95ea7045ac6c req-2bdc9b35-d5bf-4aed-a485-ee135f383fd6 service nova] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Updated VIF entry in instance network info cache for port 19978029-822a-48e0-b3c1-9d885b82a5f3. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1140.984881] env[69328]: DEBUG nova.network.neutron [req-d8780ea7-991f-4215-bca0-95ea7045ac6c req-2bdc9b35-d5bf-4aed-a485-ee135f383fd6 service nova] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Updating instance_info_cache with network_info: [{"id": "19978029-822a-48e0-b3c1-9d885b82a5f3", "address": "fa:16:3e:99:f9:c3", "network": {"id": "3be6b159-5d5c-4752-b79d-0ed6110569d0", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-915151552-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.195", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f50ac50ef6ae4abc83a8064746de7029", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19978029-82", "ovs_interfaceid": "19978029-822a-48e0-b3c1-9d885b82a5f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1141.063531] env[69328]: DEBUG nova.network.neutron [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Updating instance_info_cache with network_info: [{"id": "372b585b-44c4-4862-adf0-76a6931af0cd", "address": "fa:16:3e:08:df:c0", "network": {"id": "c37f7cbb-9e72-43fb-b82a-5602208856c5", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1726899944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e9e98f83e974a32b0db6ce5e8442012", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4fc0575-c8e3-4f3c-b2e1-e10ac2d0cc1a", "external-id": "nsx-vlan-transportzone-256", "segmentation_id": 256, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap372b585b-44", "ovs_interfaceid": "372b585b-44c4-4862-adf0-76a6931af0cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, 
"meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1141.091373] env[69328]: DEBUG oslo_vmware.api [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274157, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.140578] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1141.140830] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1141.141041] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1141.141192] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1141.141357] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1141.141696] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager.update_available_resource {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1141.242593] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ddaaddcf-b66f-4039-a886-3a12fe755a3a tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Lock "79d66d5d-e1a4-4bc0-8e43-db97153867e3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.772s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.244820] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1f57b610-d4e0-46f3-ab79-daab369f4dee tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Acquiring lock "79d66d5d-e1a4-4bc0-8e43-db97153867e3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1141.244820] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1f57b610-d4e0-46f3-ab79-daab369f4dee tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Lock "79d66d5d-e1a4-4bc0-8e43-db97153867e3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s 
{{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.246070] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1f57b610-d4e0-46f3-ab79-daab369f4dee tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Acquiring lock "79d66d5d-e1a4-4bc0-8e43-db97153867e3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1141.246299] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1f57b610-d4e0-46f3-ab79-daab369f4dee tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Lock "79d66d5d-e1a4-4bc0-8e43-db97153867e3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.246481] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1f57b610-d4e0-46f3-ab79-daab369f4dee tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Lock "79d66d5d-e1a4-4bc0-8e43-db97153867e3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.248603] env[69328]: INFO nova.compute.manager [None req-1f57b610-d4e0-46f3-ab79-daab369f4dee tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Terminating instance [ 1141.371228] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9aaf1844-f000-4e12-b49a-d70f29c67856 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.833s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.487765] env[69328]: DEBUG oslo_concurrency.lockutils [req-d8780ea7-991f-4215-bca0-95ea7045ac6c req-2bdc9b35-d5bf-4aed-a485-ee135f383fd6 service nova] Releasing lock "refresh_cache-ae46c18e-15ae-4a47-b05a-a143f10b5ab6" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1141.488055] env[69328]: DEBUG nova.compute.manager [req-d8780ea7-991f-4215-bca0-95ea7045ac6c req-2bdc9b35-d5bf-4aed-a485-ee135f383fd6 service nova] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Received event network-changed-509b2377-84e7-48a6-b2ed-811f288cc65c {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1141.488267] env[69328]: DEBUG nova.compute.manager [req-d8780ea7-991f-4215-bca0-95ea7045ac6c req-2bdc9b35-d5bf-4aed-a485-ee135f383fd6 service nova] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Refreshing instance network info cache due to event network-changed-509b2377-84e7-48a6-b2ed-811f288cc65c. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1141.488493] env[69328]: DEBUG oslo_concurrency.lockutils [req-d8780ea7-991f-4215-bca0-95ea7045ac6c req-2bdc9b35-d5bf-4aed-a485-ee135f383fd6 service nova] Acquiring lock "refresh_cache-33583ef3-252c-45d4-a514-5646f98c5f45" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1141.488664] env[69328]: DEBUG oslo_concurrency.lockutils [req-d8780ea7-991f-4215-bca0-95ea7045ac6c req-2bdc9b35-d5bf-4aed-a485-ee135f383fd6 service nova] Acquired lock "refresh_cache-33583ef3-252c-45d4-a514-5646f98c5f45" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1141.488829] env[69328]: DEBUG nova.network.neutron [req-d8780ea7-991f-4215-bca0-95ea7045ac6c req-2bdc9b35-d5bf-4aed-a485-ee135f383fd6 service nova] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Refreshing network info cache for port 509b2377-84e7-48a6-b2ed-811f288cc65c {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1141.491382] env[69328]: INFO nova.scheduler.client.report [None req-9aaf1844-f000-4e12-b49a-d70f29c67856 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Deleted allocations for instance 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9 [ 1141.567177] env[69328]: DEBUG oslo_concurrency.lockutils [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Releasing lock "refresh_cache-03f0adc8-d640-4248-be9d-ab4ba0cbe760" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1141.567455] env[69328]: DEBUG nova.compute.manager [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Instance network_info: |[{"id": "372b585b-44c4-4862-adf0-76a6931af0cd", "address": "fa:16:3e:08:df:c0", "network": {"id": "c37f7cbb-9e72-43fb-b82a-5602208856c5", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1726899944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e9e98f83e974a32b0db6ce5e8442012", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4fc0575-c8e3-4f3c-b2e1-e10ac2d0cc1a", "external-id": "nsx-vlan-transportzone-256", "segmentation_id": 256, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap372b585b-44", "ovs_interfaceid": "372b585b-44c4-4862-adf0-76a6931af0cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1141.567861] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 
03f0adc8-d640-4248-be9d-ab4ba0cbe760] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:08:df:c0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a4fc0575-c8e3-4f3c-b2e1-e10ac2d0cc1a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '372b585b-44c4-4862-adf0-76a6931af0cd', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1141.575672] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1141.576151] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1141.576393] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-158fa7c9-1b64-4c0f-9c15-dce471323978 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.599907] env[69328]: DEBUG oslo_vmware.api [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274157, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.545074} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.601159] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] ff815ffb-3422-469e-9b54-b33502826513/ff815ffb-3422-469e-9b54-b33502826513.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1141.601384] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1141.601615] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1141.601615] env[69328]: value = "task-3274158" [ 1141.601615] env[69328]: _type = "Task" [ 1141.601615] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.601919] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0983a8e0-d24e-461d-8eeb-744acf4d026b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.613316] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274158, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.614405] env[69328]: DEBUG oslo_vmware.api [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 1141.614405] env[69328]: value = "task-3274159" [ 1141.614405] env[69328]: _type = "Task" [ 1141.614405] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.622949] env[69328]: DEBUG oslo_vmware.api [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274159, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.645162] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1141.645430] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.645608] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.645762] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69328) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1141.646702] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-793adef2-9549-49f7-aa3e-90ce79780c95 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.655666] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d70722ce-edbd-4228-bc1d-07bee4989afb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.670675] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e083f90-2921-429e-85dd-2e1150d2a457 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.678216] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9013f42-ecfa-47eb-ba97-adc0d00aff15 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.713207] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Hypervisor/Node resource view: 
name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178548MB free_disk=115GB free_vcpus=48 pci_devices=None {{(pid=69328) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1141.713414] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1141.713681] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.754781] env[69328]: DEBUG nova.compute.manager [None req-1f57b610-d4e0-46f3-ab79-daab369f4dee tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1141.754998] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-1f57b610-d4e0-46f3-ab79-daab369f4dee tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1141.755950] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d053ab3-099e-4229-be8d-c92d9adcb8d9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.763792] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f57b610-d4e0-46f3-ab79-daab369f4dee tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1141.764040] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b95616b2-d14b-41b9-bf42-db87db1e496e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.771135] env[69328]: DEBUG oslo_vmware.api [None req-1f57b610-d4e0-46f3-ab79-daab369f4dee tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Waiting for the task: (returnval){ [ 1141.771135] env[69328]: value = "task-3274160" [ 1141.771135] env[69328]: _type = "Task" [ 1141.771135] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.780905] env[69328]: DEBUG oslo_vmware.api [None req-1f57b610-d4e0-46f3-ab79-daab369f4dee tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Task: {'id': task-3274160, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.905373] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a4cd4906-8e70-4668-ae51-db77a5a28560 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1141.905747] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a4cd4906-8e70-4668-ae51-db77a5a28560 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.906034] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a4cd4906-8e70-4668-ae51-db77a5a28560 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1141.906267] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a4cd4906-8e70-4668-ae51-db77a5a28560 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.906513] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a4cd4906-8e70-4668-ae51-db77a5a28560 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.908773] env[69328]: INFO nova.compute.manager [None req-a4cd4906-8e70-4668-ae51-db77a5a28560 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Terminating instance [ 1141.998603] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9aaf1844-f000-4e12-b49a-d70f29c67856 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "32b9acbc-35a0-4d67-ac74-ef46c45fa0b9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.795s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1142.115403] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274158, 'name': CreateVM_Task, 'duration_secs': 0.484984} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.118529] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1142.118921] env[69328]: DEBUG oslo_concurrency.lockutils [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.119096] env[69328]: DEBUG oslo_concurrency.lockutils [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1142.119417] env[69328]: DEBUG oslo_concurrency.lockutils [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1142.120034] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04345f16-b13c-495b-98bd-001cae0556b3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.124977] env[69328]: DEBUG oslo_vmware.api [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274159, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078841} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.126216] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1142.126558] env[69328]: DEBUG oslo_vmware.api [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Waiting for the task: (returnval){ [ 1142.126558] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52406527-d197-1a98-0062-88ace3dada8f" [ 1142.126558] env[69328]: _type = "Task" [ 1142.126558] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.127238] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c339dc1e-5218-4667-9e0b-ca01cf5cc08d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.148035] env[69328]: DEBUG oslo_vmware.api [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52406527-d197-1a98-0062-88ace3dada8f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.156536] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Reconfiguring VM instance instance-00000073 to attach disk [datastore2] ff815ffb-3422-469e-9b54-b33502826513/ff815ffb-3422-469e-9b54-b33502826513.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1142.157123] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-42e7be2b-8962-4dc0-84a6-ffc8ea5f91c4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.179587] env[69328]: DEBUG oslo_vmware.api [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 1142.179587] env[69328]: value = "task-3274161" [ 1142.179587] env[69328]: _type = "Task" [ 1142.179587] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.190440] env[69328]: DEBUG oslo_vmware.api [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274161, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.243029] env[69328]: DEBUG nova.network.neutron [req-d8780ea7-991f-4215-bca0-95ea7045ac6c req-2bdc9b35-d5bf-4aed-a485-ee135f383fd6 service nova] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Updated VIF entry in instance network info cache for port 509b2377-84e7-48a6-b2ed-811f288cc65c. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1142.243223] env[69328]: DEBUG nova.network.neutron [req-d8780ea7-991f-4215-bca0-95ea7045ac6c req-2bdc9b35-d5bf-4aed-a485-ee135f383fd6 service nova] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Updating instance_info_cache with network_info: [{"id": "509b2377-84e7-48a6-b2ed-811f288cc65c", "address": "fa:16:3e:f8:6d:5c", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap509b2377-84", "ovs_interfaceid": "509b2377-84e7-48a6-b2ed-811f288cc65c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1142.282960] env[69328]: DEBUG oslo_vmware.api [None req-1f57b610-d4e0-46f3-ab79-daab369f4dee tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Task: {'id': task-3274160, 'name': PowerOffVM_Task, 'duration_secs': 0.293972} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.283247] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f57b610-d4e0-46f3-ab79-daab369f4dee tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1142.283437] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-1f57b610-d4e0-46f3-ab79-daab369f4dee tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1142.283710] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-454178b8-85d7-48e7-8cb8-1607f77800c3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.350053] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-1f57b610-d4e0-46f3-ab79-daab369f4dee tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1142.350227] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-1f57b610-d4e0-46f3-ab79-daab369f4dee tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1142.350371] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f57b610-d4e0-46f3-ab79-daab369f4dee tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Deleting the datastore file [datastore2] 79d66d5d-e1a4-4bc0-8e43-db97153867e3 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1142.350680] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b59f3fef-5840-4872-863f-bfaeb5a06fc6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.358219] env[69328]: DEBUG oslo_vmware.api [None req-1f57b610-d4e0-46f3-ab79-daab369f4dee tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Waiting for the task: (returnval){ [ 1142.358219] env[69328]: value = "task-3274163" [ 1142.358219] env[69328]: _type = "Task" [ 1142.358219] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.366774] env[69328]: DEBUG oslo_vmware.api [None req-1f57b610-d4e0-46f3-ab79-daab369f4dee tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Task: {'id': task-3274163, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.377354] env[69328]: DEBUG nova.network.neutron [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Port ce79bad7-6bfd-4645-bc55-71dfc049411d binding to destination host cpu-1 is already ACTIVE {{(pid=69328) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1142.412522] env[69328]: DEBUG nova.compute.manager [None req-a4cd4906-8e70-4668-ae51-db77a5a28560 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1142.412763] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a4cd4906-8e70-4668-ae51-db77a5a28560 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1142.414061] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db57f1d5-f2ba-4db6-b2be-f27a99abe160 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.422531] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4cd4906-8e70-4668-ae51-db77a5a28560 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1142.422800] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bc29866d-bf32-457a-923d-87aa7e4b6a9c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.430184] env[69328]: DEBUG oslo_vmware.api [None req-a4cd4906-8e70-4668-ae51-db77a5a28560 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1142.430184] env[69328]: value = "task-3274164" [ 1142.430184] env[69328]: _type = "Task" [ 1142.430184] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.440245] env[69328]: DEBUG oslo_vmware.api [None req-a4cd4906-8e70-4668-ae51-db77a5a28560 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274164, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.640673] env[69328]: DEBUG oslo_vmware.api [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52406527-d197-1a98-0062-88ace3dada8f, 'name': SearchDatastore_Task, 'duration_secs': 0.018342} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.641078] env[69328]: DEBUG oslo_concurrency.lockutils [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1142.641237] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1142.642180] env[69328]: DEBUG oslo_concurrency.lockutils [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.642180] env[69328]: DEBUG oslo_concurrency.lockutils [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1142.642180] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1142.642404] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aa856f9b-ae6b-4b1d-aee4-2468bfccb595 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.667548] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1142.667750] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1142.668589] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-108459f2-e3d5-4ab3-8082-e7de4e10d64f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.674771] env[69328]: DEBUG oslo_vmware.api [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Waiting for the task: (returnval){ [ 1142.674771] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5220793e-431e-d77a-7094-1bf7b43743c9" [ 1142.674771] env[69328]: _type = "Task" [ 1142.674771] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.684094] env[69328]: DEBUG oslo_vmware.api [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5220793e-431e-d77a-7094-1bf7b43743c9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.692463] env[69328]: DEBUG oslo_vmware.api [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274161, 'name': ReconfigVM_Task, 'duration_secs': 0.334604} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.692769] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Reconfigured VM instance instance-00000073 to attach disk [datastore2] ff815ffb-3422-469e-9b54-b33502826513/ff815ffb-3422-469e-9b54-b33502826513.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1142.693559] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2be39d9f-54f5-4eab-8c78-f4b3e9cff231 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.702256] env[69328]: DEBUG oslo_vmware.api [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 1142.702256] env[69328]: value = "task-3274165" [ 1142.702256] env[69328]: _type = "Task" [ 1142.702256] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.711298] env[69328]: DEBUG oslo_vmware.api [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274165, 'name': Rename_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.728506] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Applying migration context for instance ee3609ea-0855-47c2-874c-349c80419781 as it has an incoming, in-progress migration c9d84624-27a9-4e50-abe2-112cbefbaf04. Migration status is migrating {{(pid=69328) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1142.730994] env[69328]: INFO nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: ee3609ea-0855-47c2-874c-349c80419781] Updating resource usage from migration c9d84624-27a9-4e50-abe2-112cbefbaf04 [ 1142.746351] env[69328]: DEBUG oslo_concurrency.lockutils [req-d8780ea7-991f-4215-bca0-95ea7045ac6c req-2bdc9b35-d5bf-4aed-a485-ee135f383fd6 service nova] Releasing lock "refresh_cache-33583ef3-252c-45d4-a514-5646f98c5f45" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1142.746648] env[69328]: DEBUG nova.compute.manager [req-d8780ea7-991f-4215-bca0-95ea7045ac6c req-2bdc9b35-d5bf-4aed-a485-ee135f383fd6 service nova] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Received event network-changed-13436ecc-0cb3-4c13-bf18-f81195196ffd {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1142.746811] env[69328]: DEBUG nova.compute.manager [req-d8780ea7-991f-4215-bca0-95ea7045ac6c req-2bdc9b35-d5bf-4aed-a485-ee135f383fd6 service nova] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Refreshing instance network info cache due to event network-changed-13436ecc-0cb3-4c13-bf18-f81195196ffd. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1142.747094] env[69328]: DEBUG oslo_concurrency.lockutils [req-d8780ea7-991f-4215-bca0-95ea7045ac6c req-2bdc9b35-d5bf-4aed-a485-ee135f383fd6 service nova] Acquiring lock "refresh_cache-de8e6616-0460-4a6e-918c-a27818da96e2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.748204] env[69328]: DEBUG oslo_concurrency.lockutils [req-d8780ea7-991f-4215-bca0-95ea7045ac6c req-2bdc9b35-d5bf-4aed-a485-ee135f383fd6 service nova] Acquired lock "refresh_cache-de8e6616-0460-4a6e-918c-a27818da96e2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1142.748204] env[69328]: DEBUG nova.network.neutron [req-d8780ea7-991f-4215-bca0-95ea7045ac6c req-2bdc9b35-d5bf-4aed-a485-ee135f383fd6 service nova] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Refreshing network info cache for port 13436ecc-0cb3-4c13-bf18-f81195196ffd {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1142.755150] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance b0a1441c-81e2-4131-a2ff-f5042d559d9f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1142.755150] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance a0b663eb-31b0-4de1-94bc-660a7d9c1c7b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1142.755150] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance f1be93b2-08db-41fe-87c4-f4e5f964cfa4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1142.755417] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance c751ef77-c3be-46cd-b7eb-fe139bf0998b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1142.755417] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 1413dcfe-3570-4657-b811-81a1acc159d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1142.755417] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 52c87371-4142-40d6-ac68-804aabd9f823 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1142.755510] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1142.755627] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 275ef1ed-8e60-4151-b548-e22e5bd8efe2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1142.755739] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance c1829dcf-3608-4955-bd50-eb9ee27d38e1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1142.755850] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance fb2d04d8-cff6-414c-9d50-3ab61729546d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1142.755957] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance de8e6616-0460-4a6e-918c-a27818da96e2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1142.756081] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1142.756192] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 33583ef3-252c-45d4-a514-5646f98c5f45 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1142.756304] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance ae46c18e-15ae-4a47-b05a-a143f10b5ab6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1142.756412] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 79d66d5d-e1a4-4bc0-8e43-db97153867e3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1142.756556] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Migration c9d84624-27a9-4e50-abe2-112cbefbaf04 is active on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1142.756734] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance ee3609ea-0855-47c2-874c-349c80419781 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1142.756889] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance ff815ffb-3422-469e-9b54-b33502826513 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1142.757057] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 03f0adc8-d640-4248-be9d-ab4ba0cbe760 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1142.757300] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Total usable vcpus: 48, total allocated vcpus: 19 {{(pid=69328) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1142.757446] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4224MB phys_disk=200GB used_disk=16GB total_vcpus=48 used_vcpus=19 pci_stats=[] {{(pid=69328) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1142.872713] env[69328]: DEBUG oslo_vmware.api [None req-1f57b610-d4e0-46f3-ab79-daab369f4dee tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Task: {'id': task-3274163, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.238223} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.872975] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f57b610-d4e0-46f3-ab79-daab369f4dee tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1142.873175] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-1f57b610-d4e0-46f3-ab79-daab369f4dee tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1142.873351] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-1f57b610-d4e0-46f3-ab79-daab369f4dee tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1142.873531] env[69328]: INFO nova.compute.manager [None req-1f57b610-d4e0-46f3-ab79-daab369f4dee tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1142.873748] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1f57b610-d4e0-46f3-ab79-daab369f4dee tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1142.876387] env[69328]: DEBUG nova.compute.manager [-] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1142.876441] env[69328]: DEBUG nova.network.neutron [-] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1142.942755] env[69328]: DEBUG oslo_vmware.api [None req-a4cd4906-8e70-4668-ae51-db77a5a28560 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274164, 'name': PowerOffVM_Task, 'duration_secs': 0.332526} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.943119] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4cd4906-8e70-4668-ae51-db77a5a28560 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1142.943622] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a4cd4906-8e70-4668-ae51-db77a5a28560 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1142.945464] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b396fffd-e281-4a2d-8a31-bd05c45b10ec {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.018274] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8f12097-d82c-42d0-9787-5462ad78a09c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.021139] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a4cd4906-8e70-4668-ae51-db77a5a28560 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1143.021597] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a4cd4906-8e70-4668-ae51-db77a5a28560 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1143.021948] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4cd4906-8e70-4668-ae51-db77a5a28560 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Deleting the datastore file [datastore2] dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1143.023196] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5f6fbf73-64d1-43ad-af03-252700f66164 {{(pid=69328) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.031812] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bac2f97e-285f-4727-81ce-dae065a5b927 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.040068] env[69328]: DEBUG oslo_vmware.api [None req-a4cd4906-8e70-4668-ae51-db77a5a28560 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1143.040068] env[69328]: value = "task-3274167" [ 1143.040068] env[69328]: _type = "Task" [ 1143.040068] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.082121] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7ca2a88-b3c0-47e8-9b97-fd94454f5c2b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.091499] env[69328]: DEBUG oslo_vmware.api [None req-a4cd4906-8e70-4668-ae51-db77a5a28560 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274167, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.097866] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32394c37-68c4-41e1-8a2f-8c7ca70bb104 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.112066] env[69328]: DEBUG nova.compute.provider_tree [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1143.187423] env[69328]: DEBUG oslo_vmware.api [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5220793e-431e-d77a-7094-1bf7b43743c9, 'name': SearchDatastore_Task, 'duration_secs': 0.012949} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.187637] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4c5eecb-4fad-41f1-8347-d0d793e6a76f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.194919] env[69328]: DEBUG oslo_vmware.api [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Waiting for the task: (returnval){ [ 1143.194919] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f4eba4-bb50-d29d-1b71-8bd059313af6" [ 1143.194919] env[69328]: _type = "Task" [ 1143.194919] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.204891] env[69328]: DEBUG oslo_vmware.api [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f4eba4-bb50-d29d-1b71-8bd059313af6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.215658] env[69328]: DEBUG oslo_vmware.api [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274165, 'name': Rename_Task, 'duration_secs': 0.336592} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.216368] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1143.216368] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0191fd09-3199-4728-a51c-aeb36275ecce {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.224904] env[69328]: DEBUG oslo_vmware.api [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 1143.224904] env[69328]: value = "task-3274168" [ 1143.224904] env[69328]: _type = "Task" [ 1143.224904] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.231307] env[69328]: DEBUG nova.compute.manager [req-c6007d3f-a62a-42a0-a327-4706c5c0a045 req-ecdb7e09-a41b-472a-995d-0ec449c97908 service nova] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Received event network-vif-deleted-c23855e3-70d4-4725-b057-d2d1f6f1d80e {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1143.231691] env[69328]: INFO nova.compute.manager [req-c6007d3f-a62a-42a0-a327-4706c5c0a045 req-ecdb7e09-a41b-472a-995d-0ec449c97908 service nova] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Neutron deleted interface c23855e3-70d4-4725-b057-d2d1f6f1d80e; detaching it from the instance and deleting it from the info cache [ 1143.231691] env[69328]: DEBUG nova.network.neutron [req-c6007d3f-a62a-42a0-a327-4706c5c0a045 req-ecdb7e09-a41b-472a-995d-0ec449c97908 service nova] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1143.239251] env[69328]: DEBUG oslo_vmware.api [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274168, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.363579] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d8b5509f-1cf6-4ced-97c0-e380ecdac5f6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "a0b663eb-31b0-4de1-94bc-660a7d9c1c7b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1143.364149] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d8b5509f-1cf6-4ced-97c0-e380ecdac5f6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "a0b663eb-31b0-4de1-94bc-660a7d9c1c7b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1143.364721] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d8b5509f-1cf6-4ced-97c0-e380ecdac5f6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "a0b663eb-31b0-4de1-94bc-660a7d9c1c7b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1143.364838] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d8b5509f-1cf6-4ced-97c0-e380ecdac5f6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "a0b663eb-31b0-4de1-94bc-660a7d9c1c7b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1143.365017] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d8b5509f-1cf6-4ced-97c0-e380ecdac5f6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "a0b663eb-31b0-4de1-94bc-660a7d9c1c7b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1143.369107] env[69328]: INFO nova.compute.manager [None req-d8b5509f-1cf6-4ced-97c0-e380ecdac5f6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Terminating instance [ 1143.400154] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "ee3609ea-0855-47c2-874c-349c80419781-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1143.400456] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "ee3609ea-0855-47c2-874c-349c80419781-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1143.400620] env[69328]: DEBUG 
oslo_concurrency.lockutils [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "ee3609ea-0855-47c2-874c-349c80419781-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1143.544949] env[69328]: DEBUG nova.network.neutron [req-d8780ea7-991f-4215-bca0-95ea7045ac6c req-2bdc9b35-d5bf-4aed-a485-ee135f383fd6 service nova] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Updated VIF entry in instance network info cache for port 13436ecc-0cb3-4c13-bf18-f81195196ffd. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1143.545323] env[69328]: DEBUG nova.network.neutron [req-d8780ea7-991f-4215-bca0-95ea7045ac6c req-2bdc9b35-d5bf-4aed-a485-ee135f383fd6 service nova] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Updating instance_info_cache with network_info: [{"id": "13436ecc-0cb3-4c13-bf18-f81195196ffd", "address": "fa:16:3e:2e:1b:14", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13436ecc-0c", "ovs_interfaceid": "13436ecc-0cb3-4c13-bf18-f81195196ffd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1143.552017] env[69328]: DEBUG oslo_vmware.api [None req-a4cd4906-8e70-4668-ae51-db77a5a28560 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274167, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.496684} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.552318] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4cd4906-8e70-4668-ae51-db77a5a28560 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1143.552530] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a4cd4906-8e70-4668-ae51-db77a5a28560 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1143.552817] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a4cd4906-8e70-4668-ae51-db77a5a28560 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1143.553098] env[69328]: INFO nova.compute.manager [None req-a4cd4906-8e70-4668-ae51-db77a5a28560 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1143.553367] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a4cd4906-8e70-4668-ae51-db77a5a28560 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1143.553581] env[69328]: DEBUG nova.compute.manager [-] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1143.553691] env[69328]: DEBUG nova.network.neutron [-] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1143.615396] env[69328]: DEBUG nova.scheduler.client.report [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1143.707797] env[69328]: DEBUG oslo_vmware.api [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f4eba4-bb50-d29d-1b71-8bd059313af6, 'name': SearchDatastore_Task, 'duration_secs': 0.012379} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.708170] env[69328]: DEBUG oslo_concurrency.lockutils [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1143.708388] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 03f0adc8-d640-4248-be9d-ab4ba0cbe760/03f0adc8-d640-4248-be9d-ab4ba0cbe760.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1143.708723] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-db6dfcab-c034-4f53-a074-6d1e1ca723a3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.710888] env[69328]: DEBUG nova.network.neutron [-] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1143.719458] env[69328]: DEBUG oslo_vmware.api [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Waiting for the task: (returnval){ [ 1143.719458] env[69328]: value = "task-3274169" [ 1143.719458] env[69328]: _type = "Task" [ 1143.719458] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.733854] env[69328]: DEBUG oslo_vmware.api [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3274169, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.733994] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8ad5aaac-e6ef-46d4-96ff-5aa62af93470 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.739951] env[69328]: DEBUG oslo_vmware.api [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274168, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.750364] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79df40b4-6992-4780-874f-3f180090217e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.794101] env[69328]: DEBUG nova.compute.manager [req-c6007d3f-a62a-42a0-a327-4706c5c0a045 req-ecdb7e09-a41b-472a-995d-0ec449c97908 service nova] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Detach interface failed, port_id=c23855e3-70d4-4725-b057-d2d1f6f1d80e, reason: Instance 79d66d5d-e1a4-4bc0-8e43-db97153867e3 could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1143.835690] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2e13d3e3-3ef9-44f8-aad8-2715bf73d241 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Acquiring lock "fb2d04d8-cff6-414c-9d50-3ab61729546d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1143.836103] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2e13d3e3-3ef9-44f8-aad8-2715bf73d241 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Lock "fb2d04d8-cff6-414c-9d50-3ab61729546d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1143.836303] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2e13d3e3-3ef9-44f8-aad8-2715bf73d241 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Acquiring lock "fb2d04d8-cff6-414c-9d50-3ab61729546d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1143.836568] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2e13d3e3-3ef9-44f8-aad8-2715bf73d241 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Lock "fb2d04d8-cff6-414c-9d50-3ab61729546d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1143.836760] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2e13d3e3-3ef9-44f8-aad8-2715bf73d241 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Lock "fb2d04d8-cff6-414c-9d50-3ab61729546d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1143.839639] env[69328]: INFO nova.compute.manager [None req-2e13d3e3-3ef9-44f8-aad8-2715bf73d241 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Terminating instance [ 1143.874294] env[69328]: DEBUG nova.compute.manager [None 
req-d8b5509f-1cf6-4ced-97c0-e380ecdac5f6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1143.874294] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d8b5509f-1cf6-4ced-97c0-e380ecdac5f6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1143.875096] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea425e8e-4ed7-4928-900f-79234fb6e229 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.883926] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8b5509f-1cf6-4ced-97c0-e380ecdac5f6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1143.884161] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-24df24eb-ae58-44fa-a48a-b29ee095f7cc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.891622] env[69328]: DEBUG oslo_vmware.api [None req-d8b5509f-1cf6-4ced-97c0-e380ecdac5f6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1143.891622] env[69328]: value = "task-3274170" [ 1143.891622] env[69328]: _type = "Task" [ 1143.891622] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.902506] env[69328]: DEBUG oslo_vmware.api [None req-d8b5509f-1cf6-4ced-97c0-e380ecdac5f6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3274170, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.052306] env[69328]: DEBUG oslo_concurrency.lockutils [req-d8780ea7-991f-4215-bca0-95ea7045ac6c req-2bdc9b35-d5bf-4aed-a485-ee135f383fd6 service nova] Releasing lock "refresh_cache-de8e6616-0460-4a6e-918c-a27818da96e2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1144.052306] env[69328]: DEBUG nova.compute.manager [req-d8780ea7-991f-4215-bca0-95ea7045ac6c req-2bdc9b35-d5bf-4aed-a485-ee135f383fd6 service nova] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Received event network-vif-plugged-372b585b-44c4-4862-adf0-76a6931af0cd {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1144.052306] env[69328]: DEBUG oslo_concurrency.lockutils [req-d8780ea7-991f-4215-bca0-95ea7045ac6c req-2bdc9b35-d5bf-4aed-a485-ee135f383fd6 service nova] Acquiring lock "03f0adc8-d640-4248-be9d-ab4ba0cbe760-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1144.052306] env[69328]: DEBUG oslo_concurrency.lockutils [req-d8780ea7-991f-4215-bca0-95ea7045ac6c req-2bdc9b35-d5bf-4aed-a485-ee135f383fd6 service nova] Lock "03f0adc8-d640-4248-be9d-ab4ba0cbe760-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1144.052306] env[69328]: DEBUG oslo_concurrency.lockutils [req-d8780ea7-991f-4215-bca0-95ea7045ac6c req-2bdc9b35-d5bf-4aed-a485-ee135f383fd6 service nova] Lock "03f0adc8-d640-4248-be9d-ab4ba0cbe760-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1144.052306] env[69328]: DEBUG nova.compute.manager [req-d8780ea7-991f-4215-bca0-95ea7045ac6c req-2bdc9b35-d5bf-4aed-a485-ee135f383fd6 service nova] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] No waiting events found dispatching network-vif-plugged-372b585b-44c4-4862-adf0-76a6931af0cd {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1144.052306] env[69328]: WARNING nova.compute.manager [req-d8780ea7-991f-4215-bca0-95ea7045ac6c req-2bdc9b35-d5bf-4aed-a485-ee135f383fd6 service nova] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Received unexpected event network-vif-plugged-372b585b-44c4-4862-adf0-76a6931af0cd for instance with vm_state building and task_state spawning. [ 1144.052306] env[69328]: DEBUG nova.compute.manager [req-d8780ea7-991f-4215-bca0-95ea7045ac6c req-2bdc9b35-d5bf-4aed-a485-ee135f383fd6 service nova] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Received event network-changed-372b585b-44c4-4862-adf0-76a6931af0cd {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1144.052306] env[69328]: DEBUG nova.compute.manager [req-d8780ea7-991f-4215-bca0-95ea7045ac6c req-2bdc9b35-d5bf-4aed-a485-ee135f383fd6 service nova] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Refreshing instance network info cache due to event network-changed-372b585b-44c4-4862-adf0-76a6931af0cd. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1144.052306] env[69328]: DEBUG oslo_concurrency.lockutils [req-d8780ea7-991f-4215-bca0-95ea7045ac6c req-2bdc9b35-d5bf-4aed-a485-ee135f383fd6 service nova] Acquiring lock "refresh_cache-03f0adc8-d640-4248-be9d-ab4ba0cbe760" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1144.052306] env[69328]: DEBUG oslo_concurrency.lockutils [req-d8780ea7-991f-4215-bca0-95ea7045ac6c req-2bdc9b35-d5bf-4aed-a485-ee135f383fd6 service nova] Acquired lock "refresh_cache-03f0adc8-d640-4248-be9d-ab4ba0cbe760" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1144.052306] env[69328]: DEBUG nova.network.neutron [req-d8780ea7-991f-4215-bca0-95ea7045ac6c req-2bdc9b35-d5bf-4aed-a485-ee135f383fd6 service nova] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Refreshing network info cache for port 372b585b-44c4-4862-adf0-76a6931af0cd {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1144.122026] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69328) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1144.123237] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.409s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1144.123237] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1144.123237] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Cleaning up deleted instances {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11834}} [ 1144.216340] env[69328]: INFO nova.compute.manager [-] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Took 1.34 seconds to deallocate network for instance. [ 1144.236539] env[69328]: DEBUG oslo_vmware.api [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3274169, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.249144] env[69328]: DEBUG oslo_vmware.api [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274168, 'name': PowerOnVM_Task, 'duration_secs': 0.592381} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.249144] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1144.249144] env[69328]: INFO nova.compute.manager [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Took 7.72 seconds to spawn the instance on the hypervisor. [ 1144.249144] env[69328]: DEBUG nova.compute.manager [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1144.249144] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e684776f-a845-45a4-81ce-44279fa126fd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.347163] env[69328]: DEBUG nova.compute.manager [None req-2e13d3e3-3ef9-44f8-aad8-2715bf73d241 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1144.348052] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2e13d3e3-3ef9-44f8-aad8-2715bf73d241 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1144.348396] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-700b7589-62de-4aca-9c73-1ffc48e61738 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.358085] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e13d3e3-3ef9-44f8-aad8-2715bf73d241 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1144.358355] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e6b97186-f17d-43ca-81a9-56e82cb3d87f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.364993] env[69328]: DEBUG oslo_vmware.api [None req-2e13d3e3-3ef9-44f8-aad8-2715bf73d241 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Waiting for the task: (returnval){ [ 1144.364993] env[69328]: value = "task-3274171" [ 1144.364993] env[69328]: _type = "Task" [ 1144.364993] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.374259] env[69328]: DEBUG oslo_vmware.api [None req-2e13d3e3-3ef9-44f8-aad8-2715bf73d241 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Task: {'id': task-3274171, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.401722] env[69328]: DEBUG oslo_vmware.api [None req-d8b5509f-1cf6-4ced-97c0-e380ecdac5f6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3274170, 'name': PowerOffVM_Task, 'duration_secs': 0.325231} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.401929] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8b5509f-1cf6-4ced-97c0-e380ecdac5f6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1144.402115] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d8b5509f-1cf6-4ced-97c0-e380ecdac5f6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1144.402406] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-62f407b6-83ed-4427-800e-82363f814e94 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.436139] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "refresh_cache-ee3609ea-0855-47c2-874c-349c80419781" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1144.436408] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquired lock "refresh_cache-ee3609ea-0855-47c2-874c-349c80419781" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1144.436615] env[69328]: DEBUG nova.network.neutron [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1144.484817] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d8b5509f-1cf6-4ced-97c0-e380ecdac5f6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1144.485093] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d8b5509f-1cf6-4ced-97c0-e380ecdac5f6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Deleting contents of the 
VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1144.485299] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8b5509f-1cf6-4ced-97c0-e380ecdac5f6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Deleting the datastore file [datastore2] a0b663eb-31b0-4de1-94bc-660a7d9c1c7b {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1144.485626] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-03b3c215-f5f1-4919-b704-f9b84609123d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.492461] env[69328]: DEBUG oslo_vmware.api [None req-d8b5509f-1cf6-4ced-97c0-e380ecdac5f6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for the task: (returnval){ [ 1144.492461] env[69328]: value = "task-3274173" [ 1144.492461] env[69328]: _type = "Task" [ 1144.492461] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.503066] env[69328]: DEBUG oslo_vmware.api [None req-d8b5509f-1cf6-4ced-97c0-e380ecdac5f6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3274173, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.558765] env[69328]: DEBUG nova.network.neutron [-] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1144.637254] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] There are 48 instances to clean {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11843}} [ 1144.638333] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 32b9acbc-35a0-4d67-ac74-ef46c45fa0b9] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1144.725627] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1f57b610-d4e0-46f3-ab79-daab369f4dee tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1144.726027] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1f57b610-d4e0-46f3-ab79-daab369f4dee tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1144.726155] env[69328]: DEBUG nova.objects.instance [None req-1f57b610-d4e0-46f3-ab79-daab369f4dee tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Lazy-loading 'resources' on Instance uuid 79d66d5d-e1a4-4bc0-8e43-db97153867e3 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1144.734108] env[69328]: DEBUG oslo_vmware.api [None 
req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3274169, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.635233} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.734403] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 03f0adc8-d640-4248-be9d-ab4ba0cbe760/03f0adc8-d640-4248-be9d-ab4ba0cbe760.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1144.734651] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1144.734954] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-11c6f06b-ff88-4eb7-8e8e-59e3195d3444 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.743540] env[69328]: DEBUG oslo_vmware.api [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Waiting for the task: (returnval){ [ 1144.743540] env[69328]: value = "task-3274174" [ 1144.743540] env[69328]: _type = "Task" [ 1144.743540] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.752768] env[69328]: DEBUG oslo_vmware.api [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3274174, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.777076] env[69328]: INFO nova.compute.manager [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Took 15.28 seconds to build instance. [ 1144.875541] env[69328]: DEBUG oslo_vmware.api [None req-2e13d3e3-3ef9-44f8-aad8-2715bf73d241 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Task: {'id': task-3274171, 'name': PowerOffVM_Task, 'duration_secs': 0.250927} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.875836] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e13d3e3-3ef9-44f8-aad8-2715bf73d241 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1144.876013] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2e13d3e3-3ef9-44f8-aad8-2715bf73d241 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1144.876285] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6e0a6662-27e4-449d-97a9-3f2c5d89981d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.976019] env[69328]: DEBUG nova.network.neutron [req-d8780ea7-991f-4215-bca0-95ea7045ac6c req-2bdc9b35-d5bf-4aed-a485-ee135f383fd6 service nova] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Updated VIF entry in instance network info cache for port 372b585b-44c4-4862-adf0-76a6931af0cd. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1144.976415] env[69328]: DEBUG nova.network.neutron [req-d8780ea7-991f-4215-bca0-95ea7045ac6c req-2bdc9b35-d5bf-4aed-a485-ee135f383fd6 service nova] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Updating instance_info_cache with network_info: [{"id": "372b585b-44c4-4862-adf0-76a6931af0cd", "address": "fa:16:3e:08:df:c0", "network": {"id": "c37f7cbb-9e72-43fb-b82a-5602208856c5", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1726899944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e9e98f83e974a32b0db6ce5e8442012", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4fc0575-c8e3-4f3c-b2e1-e10ac2d0cc1a", "external-id": "nsx-vlan-transportzone-256", "segmentation_id": 256, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap372b585b-44", "ovs_interfaceid": "372b585b-44c4-4862-adf0-76a6931af0cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1144.978945] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2e13d3e3-3ef9-44f8-aad8-2715bf73d241 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1144.979187] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2e13d3e3-3ef9-44f8-aad8-2715bf73d241 tempest-ServersV294TestFqdnHostnames-2085437736 
tempest-ServersV294TestFqdnHostnames-2085437736-project-member] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1144.979333] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e13d3e3-3ef9-44f8-aad8-2715bf73d241 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Deleting the datastore file [datastore1] fb2d04d8-cff6-414c-9d50-3ab61729546d {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1144.979848] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-08c40edf-dddd-4a6a-87ec-4fd0a8539df4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.986902] env[69328]: DEBUG oslo_vmware.api [None req-2e13d3e3-3ef9-44f8-aad8-2715bf73d241 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Waiting for the task: (returnval){ [ 1144.986902] env[69328]: value = "task-3274176" [ 1144.986902] env[69328]: _type = "Task" [ 1144.986902] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.001879] env[69328]: DEBUG oslo_vmware.api [None req-2e13d3e3-3ef9-44f8-aad8-2715bf73d241 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Task: {'id': task-3274176, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.004981] env[69328]: DEBUG oslo_vmware.api [None req-d8b5509f-1cf6-4ced-97c0-e380ecdac5f6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Task: {'id': task-3274173, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.206248} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.005240] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8b5509f-1cf6-4ced-97c0-e380ecdac5f6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1145.005458] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d8b5509f-1cf6-4ced-97c0-e380ecdac5f6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1145.005620] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d8b5509f-1cf6-4ced-97c0-e380ecdac5f6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1145.005805] env[69328]: INFO nova.compute.manager [None req-d8b5509f-1cf6-4ced-97c0-e380ecdac5f6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Took 1.13 seconds to destroy the instance on the hypervisor. 
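The records above all follow one layout: a monotonic timestamp, an env[pid] tag, a level, the emitting logger, a request context in square brackets, an optional [instance: <uuid>] tag, the message, and usually a trailing {{(pid=...) function file:line}} locator. The sketch below is a minimal parser fit only to the lines visible in this excerpt (the regex is an assumption, not an official Nova/oslo log format), handy for pulling out every record that touches a given instance or request ID.

import re

# Regex fit to the record layout visible in this excerpt (an assumption, not an
# official Nova/oslo format): "[ <uptime>] env[<pid>]: <LEVEL> <logger>
# [<request context>] [instance: <uuid>] <message> {{(pid=<pid>) <func> <file:line>}}"
RECORD = re.compile(
    r"\[\s*(?P<uptime>\d+\.\d+)\]\s+env\[(?P<env_pid>\d+)\]:\s+"
    r"(?P<level>DEBUG|INFO|WARNING|ERROR)\s+"
    r"(?P<logger>\S+)\s+"
    r"\[(?P<context>[^\]]*)\]\s+"
    r"(?:\[instance:\s*(?P<instance>[0-9a-f-]+)\]\s+)?"
    r"(?P<message>.*?)"
    r"(?:\s+\{\{\(pid=\d+\)\s+(?P<func>\S+)\s+(?P<location>\S+)\}\})?$"
)

def parse_record(line):
    """Parse one record into a field dict.

    Returns None for continuation lines of multi-line records, such as the
    'Waiting for the task: (returnval){ ... }' blocks seen above.
    """
    m = RECORD.match(line.strip())
    return m.groupdict() if m else None

if __name__ == "__main__":
    sample = ('[ 1145.005805] env[69328]: INFO nova.compute.manager '
              '[None req-d8b5509f-1cf6-4ced-97c0-e380ecdac5f6 ...] '
              '[instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] '
              'Took 1.13 seconds to destroy the instance on the hypervisor.')
    print(parse_record(sample))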
[ 1145.006027] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d8b5509f-1cf6-4ced-97c0-e380ecdac5f6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1145.006223] env[69328]: DEBUG nova.compute.manager [-] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1145.006318] env[69328]: DEBUG nova.network.neutron [-] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1145.062273] env[69328]: INFO nova.compute.manager [-] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Took 1.51 seconds to deallocate network for instance. [ 1145.141230] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: aaa9deb3-9a52-43e3-bf9b-a53922439be2] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1145.256594] env[69328]: DEBUG oslo_vmware.api [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3274174, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07319} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.257923] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1145.257923] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fea61b04-6f4a-49b5-93b2-69e7a1b57c99 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.293882] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Reconfiguring VM instance instance-00000074 to attach disk [datastore2] 03f0adc8-d640-4248-be9d-ab4ba0cbe760/03f0adc8-d640-4248-be9d-ab4ba0cbe760.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1145.299185] env[69328]: DEBUG oslo_concurrency.lockutils [None req-3b6716fb-c11b-4bbf-badc-168f65bcc36a tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "ff815ffb-3422-469e-9b54-b33502826513" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.817s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1145.299826] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with 
opID=oslo.vmware-c2e95873-3e1a-43c1-8d7a-1d676b242b6a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.314509] env[69328]: DEBUG nova.compute.manager [req-4e3f8748-7743-4a65-b56e-989d9f26e128 req-8feca0e9-6bfb-463b-bf4d-41cfd3a9a891 service nova] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Received event network-vif-deleted-d779425b-180c-47fd-b307-e02e14f18a26 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1145.315800] env[69328]: DEBUG nova.network.neutron [-] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1145.327800] env[69328]: DEBUG oslo_vmware.api [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Waiting for the task: (returnval){ [ 1145.327800] env[69328]: value = "task-3274177" [ 1145.327800] env[69328]: _type = "Task" [ 1145.327800] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.342289] env[69328]: DEBUG oslo_vmware.api [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3274177, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.398091] env[69328]: DEBUG nova.network.neutron [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Updating instance_info_cache with network_info: [{"id": "ce79bad7-6bfd-4645-bc55-71dfc049411d", "address": "fa:16:3e:c6:38:a6", "network": {"id": "ce3bec03-01f9-4ac9-80e4-bfb944c0bb66", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-545997027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.234", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87581f423dc64e4fb9fe1d51ebc68597", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce79bad7-6b", "ovs_interfaceid": "ce79bad7-6bfd-4645-bc55-71dfc049411d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1145.480229] env[69328]: DEBUG oslo_concurrency.lockutils [req-d8780ea7-991f-4215-bca0-95ea7045ac6c req-2bdc9b35-d5bf-4aed-a485-ee135f383fd6 service nova] Releasing lock "refresh_cache-03f0adc8-d640-4248-be9d-ab4ba0cbe760" {{(pid=69328) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1145.501434] env[69328]: DEBUG oslo_vmware.api [None req-2e13d3e3-3ef9-44f8-aad8-2715bf73d241 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Task: {'id': task-3274176, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.555350] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d139e6c8-4208-4a2e-ac8e-2c2804d70607 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.561092] env[69328]: DEBUG nova.compute.manager [req-0f596bd8-af74-42b8-bd56-28b0781978f4 req-eb135f30-722a-4fa8-ad9c-c875313da4f3 service nova] [instance: ff815ffb-3422-469e-9b54-b33502826513] Received event network-changed-ecf2d696-3969-4c5e-ac8c-0578b4981440 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1145.562125] env[69328]: DEBUG nova.compute.manager [req-0f596bd8-af74-42b8-bd56-28b0781978f4 req-eb135f30-722a-4fa8-ad9c-c875313da4f3 service nova] [instance: ff815ffb-3422-469e-9b54-b33502826513] Refreshing instance network info cache due to event network-changed-ecf2d696-3969-4c5e-ac8c-0578b4981440. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1145.562125] env[69328]: DEBUG oslo_concurrency.lockutils [req-0f596bd8-af74-42b8-bd56-28b0781978f4 req-eb135f30-722a-4fa8-ad9c-c875313da4f3 service nova] Acquiring lock "refresh_cache-ff815ffb-3422-469e-9b54-b33502826513" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1145.562125] env[69328]: DEBUG oslo_concurrency.lockutils [req-0f596bd8-af74-42b8-bd56-28b0781978f4 req-eb135f30-722a-4fa8-ad9c-c875313da4f3 service nova] Acquired lock "refresh_cache-ff815ffb-3422-469e-9b54-b33502826513" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1145.562125] env[69328]: DEBUG nova.network.neutron [req-0f596bd8-af74-42b8-bd56-28b0781978f4 req-eb135f30-722a-4fa8-ad9c-c875313da4f3 service nova] [instance: ff815ffb-3422-469e-9b54-b33502826513] Refreshing network info cache for port ecf2d696-3969-4c5e-ac8c-0578b4981440 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1145.569071] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7edc0959-4076-43e5-8794-ea72fb505190 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.574526] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a4cd4906-8e70-4668-ae51-db77a5a28560 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1145.603886] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c071931e-42a0-47fa-b1b2-d88d4159eaa5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.613427] env[69328]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1277d9b7-4418-4d39-83b5-528e95dc07bf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.629524] env[69328]: DEBUG nova.compute.provider_tree [None req-1f57b610-d4e0-46f3-ab79-daab369f4dee tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1145.644939] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 5308a4f5-302f-4d48-a3ef-6c0f1a0af7ee] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1145.820616] env[69328]: INFO nova.compute.manager [-] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Took 0.81 seconds to deallocate network for instance. [ 1145.838195] env[69328]: DEBUG oslo_vmware.api [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3274177, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.900408] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Releasing lock "refresh_cache-ee3609ea-0855-47c2-874c-349c80419781" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1146.001635] env[69328]: DEBUG oslo_vmware.api [None req-2e13d3e3-3ef9-44f8-aad8-2715bf73d241 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Task: {'id': task-3274176, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.619376} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.001889] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e13d3e3-3ef9-44f8-aad8-2715bf73d241 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1146.002126] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2e13d3e3-3ef9-44f8-aad8-2715bf73d241 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1146.002296] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2e13d3e3-3ef9-44f8-aad8-2715bf73d241 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1146.002485] env[69328]: INFO nova.compute.manager [None req-2e13d3e3-3ef9-44f8-aad8-2715bf73d241 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Took 1.66 seconds to destroy the instance on the hypervisor. [ 1146.002725] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2e13d3e3-3ef9-44f8-aad8-2715bf73d241 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1146.002913] env[69328]: DEBUG nova.compute.manager [-] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1146.003016] env[69328]: DEBUG nova.network.neutron [-] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1146.132473] env[69328]: DEBUG nova.scheduler.client.report [None req-1f57b610-d4e0-46f3-ab79-daab369f4dee tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1146.149277] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 071c1837-9d0b-4b69-b16e-991b300385fb] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1146.291880] env[69328]: DEBUG nova.network.neutron [req-0f596bd8-af74-42b8-bd56-28b0781978f4 req-eb135f30-722a-4fa8-ad9c-c875313da4f3 service nova] [instance: ff815ffb-3422-469e-9b54-b33502826513] Updated VIF entry in instance network info cache for port ecf2d696-3969-4c5e-ac8c-0578b4981440. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1146.292396] env[69328]: DEBUG nova.network.neutron [req-0f596bd8-af74-42b8-bd56-28b0781978f4 req-eb135f30-722a-4fa8-ad9c-c875313da4f3 service nova] [instance: ff815ffb-3422-469e-9b54-b33502826513] Updating instance_info_cache with network_info: [{"id": "ecf2d696-3969-4c5e-ac8c-0578b4981440", "address": "fa:16:3e:15:2d:17", "network": {"id": "2e8bdd1b-0cca-4f7c-bba3-ff1750073020", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2058593014-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.146", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "088bc9e3aeb449baa0a522342d57d183", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapecf2d696-39", "ovs_interfaceid": "ecf2d696-3969-4c5e-ac8c-0578b4981440", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1146.327728] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d8b5509f-1cf6-4ced-97c0-e380ecdac5f6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1146.340718] env[69328]: DEBUG oslo_vmware.api [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3274177, 'name': ReconfigVM_Task, 'duration_secs': 0.643499} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.341051] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Reconfigured VM instance instance-00000074 to attach disk [datastore2] 03f0adc8-d640-4248-be9d-ab4ba0cbe760/03f0adc8-d640-4248-be9d-ab4ba0cbe760.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1146.341770] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3e6d6577-01d8-4163-9ede-964f78b7b299 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.348892] env[69328]: DEBUG oslo_vmware.api [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Waiting for the task: (returnval){ [ 1146.348892] env[69328]: value = "task-3274178" [ 1146.348892] env[69328]: _type = "Task" [ 1146.348892] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.357241] env[69328]: DEBUG oslo_vmware.api [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3274178, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.410335] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7a3fd7c-cab2-46d8-b711-9316368c21d2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.418354] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72b974f0-afad-4c6b-8796-1ac2c4fbb8ea {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.637068] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1f57b610-d4e0-46f3-ab79-daab369f4dee tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.911s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1146.639500] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a4cd4906-8e70-4668-ae51-db77a5a28560 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.065s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1146.639819] env[69328]: DEBUG nova.objects.instance [None req-a4cd4906-8e70-4668-ae51-db77a5a28560 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lazy-loading 'resources' on Instance uuid dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1146.652514] env[69328]: DEBUG nova.compute.manager [None 
req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 14521ee3-d749-48b4-aeec-23c94ca2cf9f] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1146.668286] env[69328]: INFO nova.scheduler.client.report [None req-1f57b610-d4e0-46f3-ab79-daab369f4dee tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Deleted allocations for instance 79d66d5d-e1a4-4bc0-8e43-db97153867e3 [ 1146.794949] env[69328]: DEBUG oslo_concurrency.lockutils [req-0f596bd8-af74-42b8-bd56-28b0781978f4 req-eb135f30-722a-4fa8-ad9c-c875313da4f3 service nova] Releasing lock "refresh_cache-ff815ffb-3422-469e-9b54-b33502826513" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1146.859435] env[69328]: DEBUG oslo_vmware.api [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3274178, 'name': Rename_Task, 'duration_secs': 0.17312} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.859722] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1146.860038] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-acf9d833-42ab-42fb-9fbc-d0d8b220dc87 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.867860] env[69328]: DEBUG oslo_vmware.api [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Waiting for the task: (returnval){ [ 1146.867860] env[69328]: value = "task-3274179" [ 1146.867860] env[69328]: _type = "Task" [ 1146.867860] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.877426] env[69328]: DEBUG oslo_vmware.api [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3274179, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.953997] env[69328]: DEBUG nova.network.neutron [-] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1147.155301] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: ac0f967d-18c8-45d8-94ca-829a1fe11451] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1147.175465] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1f57b610-d4e0-46f3-ab79-daab369f4dee tempest-ServerGroupTestJSON-1291977255 tempest-ServerGroupTestJSON-1291977255-project-member] Lock "79d66d5d-e1a4-4bc0-8e43-db97153867e3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.930s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1147.299267] env[69328]: DEBUG nova.compute.manager [req-76b0741f-56a2-4efc-a668-ec8574e63758 req-c1a6e80e-e597-489d-bf5f-e5e95b148311 service nova] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Received event network-vif-deleted-e5103faf-fa4a-4715-b9eb-8469ebb32b28 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1147.379626] env[69328]: DEBUG oslo_vmware.api [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3274179, 'name': PowerOnVM_Task} progress is 90%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.381569] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dacd1d9-7ff8-49f7-a4f8-566d05f938cd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.393266] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7620d69-ac27-4c20-abad-aecbde7fe49b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.424831] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbcb5443-ebcb-44c4-bdfb-b26254533f86 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.433312] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73810c95-3aca-4538-8a68-c4bbaae28a96 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.448812] env[69328]: DEBUG nova.compute.provider_tree [None req-a4cd4906-8e70-4668-ae51-db77a5a28560 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1147.457443] env[69328]: INFO nova.compute.manager [-] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Took 1.45 seconds to deallocate network for instance. 
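Most of the vCenter work above runs through the same cadence: a *_Task method is invoked (PowerOnVM_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task, ...), the API layer logs "Waiting for the task", _poll_task reports progress percentages, and completion is logged together with a duration_secs value. The snippet below is a generic poll-until-done loop that mirrors that cadence; it is a simplified illustration only, not the oslo_vmware wait_for_task implementation, and poll_fn is a hypothetical caller-supplied callable.

import time

def wait_for_task(poll_fn, interval=0.5, timeout=300.0):
    """Poll a task until it completes, mirroring the cadence in the log above.

    ``poll_fn`` is a hypothetical callable returning a (state, progress) tuple
    such as ("running", 51) or ("success", 100).  This is a generic sketch of
    the pattern, not the oslo_vmware implementation.
    """
    start = time.monotonic()
    while True:
        state, progress = poll_fn()
        print(f"progress is {progress}%")        # analogue of the _poll_task DEBUG lines
        if state == "success":
            return time.monotonic() - start      # analogue of the logged duration_secs
        if state == "error":
            raise RuntimeError("task reported an error")
        if time.monotonic() - start > timeout:
            raise TimeoutError("task did not complete within the timeout")
        time.sleep(interval)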
[ 1147.549722] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e62aaf42-9ee7-47f5-b9b9-dc0e05bef14a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.570454] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9628f163-a4d8-4ed7-880b-3f70d31b9b3b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.578749] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Updating instance 'ee3609ea-0855-47c2-874c-349c80419781' progress to 83 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1147.594621] env[69328]: DEBUG nova.compute.manager [req-876a46f9-7a65-4434-bcf3-2a40348543dd req-dcdc5005-81bd-45e5-ae14-401c50063106 service nova] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Received event network-vif-deleted-d0a9a5ba-8927-4de7-892b-8444448e4551 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1147.658357] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 6ad357d9-c35a-4fdb-8dd0-39a0617bf85e] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1147.879058] env[69328]: DEBUG oslo_vmware.api [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3274179, 'name': PowerOnVM_Task, 'duration_secs': 0.588199} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.879437] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1147.879712] env[69328]: INFO nova.compute.manager [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Took 8.96 seconds to spawn the instance on the hypervisor. 
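Lines like "Took 8.96 seconds to spawn the instance on the hypervisor", "Took 17.18 seconds to build instance", and the lockutils "waited N s" / "held N s" messages make it easy to pull rough timing figures out of this log. The sketch below aggregates spawn/build durations per instance and lock wait/hold times; the regexes are fit to the exact phrasing visible in this excerpt and are an assumption, so they may need adjusting for other releases.

import re
import sys
from collections import defaultdict

# Patterns fit to the phrasing visible in this excerpt (assumptions, not a stable format).
SPAWN = re.compile(
    r"\[instance: (?P<uuid>[0-9a-f-]+)\] Took (?P<secs>[\d.]+) seconds to "
    r"(?P<what>spawn the instance on the hypervisor|build instance)"
)
LOCK = re.compile(
    r'Lock "(?P<name>[^"]+)" '
    r'(?:acquired by .*? :: waited (?P<waited>[\d.]+)s'
    r'|"released" by .*? :: held (?P<held>[\d.]+)s)'
)

def summarize(lines):
    """Collect spawn/build durations per instance and lock wait/hold times."""
    per_instance = defaultdict(dict)
    lock_waits = defaultdict(list)
    lock_holds = defaultdict(list)
    for line in lines:
        m = SPAWN.search(line)
        if m:
            per_instance[m["uuid"]][m["what"]] = float(m["secs"])
        m = LOCK.search(line)
        if m:
            if m["waited"] is not None:
                lock_waits[m["name"]].append(float(m["waited"]))
            if m["held"] is not None:
                lock_holds[m["name"]].append(float(m["held"]))
    return per_instance, lock_waits, lock_holds

if __name__ == "__main__":
    instances, waits, holds = summarize(sys.stdin)
    for uuid, timings in sorted(instances.items()):
        print(uuid, timings)
    for name, held in sorted(holds.items()):
        waited = waits.get(name, [0.0])
        print(f'lock "{name}": max held {max(held):.3f}s, max waited {max(waited):.3f}s')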
[ 1147.879907] env[69328]: DEBUG nova.compute.manager [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1147.880728] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f95788eb-142e-47e9-9d82-a83ea405d006 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.951883] env[69328]: DEBUG nova.scheduler.client.report [None req-a4cd4906-8e70-4668-ae51-db77a5a28560 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1147.962676] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2e13d3e3-3ef9-44f8-aad8-2715bf73d241 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1148.085618] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1148.085933] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8ccfdf81-204e-47c7-bc41-bedcfe339e2e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.095837] env[69328]: DEBUG oslo_vmware.api [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 1148.095837] env[69328]: value = "task-3274180" [ 1148.095837] env[69328]: _type = "Task" [ 1148.095837] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.105516] env[69328]: DEBUG oslo_vmware.api [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274180, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.161908] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: c7321021-15ea-47f4-a8ca-1045f2966394] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1148.399310] env[69328]: INFO nova.compute.manager [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Took 17.18 seconds to build instance. [ 1148.457298] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a4cd4906-8e70-4668-ae51-db77a5a28560 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.818s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1148.463249] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d8b5509f-1cf6-4ced-97c0-e380ecdac5f6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.132s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1148.463739] env[69328]: DEBUG nova.objects.instance [None req-d8b5509f-1cf6-4ced-97c0-e380ecdac5f6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lazy-loading 'resources' on Instance uuid a0b663eb-31b0-4de1-94bc-660a7d9c1c7b {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1148.487382] env[69328]: INFO nova.scheduler.client.report [None req-a4cd4906-8e70-4668-ae51-db77a5a28560 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Deleted allocations for instance dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34 [ 1148.607864] env[69328]: DEBUG oslo_vmware.api [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274180, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.666195] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 204286d7-c806-48cb-85e9-b2a78571777c] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1148.902397] env[69328]: DEBUG oslo_concurrency.lockutils [None req-beb529fc-3b83-4771-b182-76ca0722ce80 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Lock "03f0adc8-d640-4248-be9d-ab4ba0cbe760" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.700s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1148.995526] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a4cd4906-8e70-4668-ae51-db77a5a28560 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.090s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1149.107723] env[69328]: DEBUG oslo_vmware.api [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274180, 'name': PowerOnVM_Task, 'duration_secs': 0.618432} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.108372] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1149.108372] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ab153d95-0873-483d-b388-dd6b95968876 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Updating instance 'ee3609ea-0855-47c2-874c-349c80419781' progress to 100 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1149.172123] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 19f537b7-90fc-4832-b137-e042e00a508b] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1149.212272] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95be062a-fb85-4450-9cf2-7a85a1c11617 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.220112] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fbab2a4-2944-42c3-8ac4-b683c1ae5130 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.254659] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f23b7c9-9fce-4229-b81a-8f6cc7d51d57 {{(pid=69328) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.262819] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-676e3120-3291-4169-bd72-8b0d7aac580b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.277138] env[69328]: DEBUG nova.compute.provider_tree [None req-d8b5509f-1cf6-4ced-97c0-e380ecdac5f6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1149.675725] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 6b9757de-a274-4f4d-9b73-cc2ca92b4732] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1149.782099] env[69328]: DEBUG nova.scheduler.client.report [None req-d8b5509f-1cf6-4ced-97c0-e380ecdac5f6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1150.178437] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: dd43adb3-b073-483a-81dd-69df7f746874] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1150.290478] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d8b5509f-1cf6-4ced-97c0-e380ecdac5f6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.831s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1150.294069] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2e13d3e3-3ef9-44f8-aad8-2715bf73d241 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.331s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1150.294492] env[69328]: DEBUG nova.objects.instance [None req-2e13d3e3-3ef9-44f8-aad8-2715bf73d241 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Lazy-loading 'resources' on Instance uuid fb2d04d8-cff6-414c-9d50-3ab61729546d {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1150.312104] env[69328]: INFO nova.scheduler.client.report [None req-d8b5509f-1cf6-4ced-97c0-e380ecdac5f6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Deleted allocations for instance a0b663eb-31b0-4de1-94bc-660a7d9c1c7b [ 1150.682125] env[69328]: DEBUG 
nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: dc050589-e37a-4798-9532-df4ecfab7eb1] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1150.728872] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Acquiring lock "0c83f194-9346-4e24-a0ea-815d0b454ded" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1150.729143] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Lock "0c83f194-9346-4e24-a0ea-815d0b454ded" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1150.821293] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d8b5509f-1cf6-4ced-97c0-e380ecdac5f6 tempest-ServersTestJSON-858906230 tempest-ServersTestJSON-858906230-project-member] Lock "a0b663eb-31b0-4de1-94bc-660a7d9c1c7b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.457s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1150.961207] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Acquiring lock "53eb70f0-1734-4386-b747-014561ba577b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1150.961513] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Lock "53eb70f0-1734-4386-b747-014561ba577b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1150.967675] env[69328]: DEBUG nova.compute.manager [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Stashing vm_state: active {{(pid=69328) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1150.975259] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0baef903-48ec-4b17-bb95-9e4f98e5a4d9 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "ee3609ea-0855-47c2-874c-349c80419781" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1150.975684] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0baef903-48ec-4b17-bb95-9e4f98e5a4d9 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock 
"ee3609ea-0855-47c2-874c-349c80419781" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1150.975940] env[69328]: DEBUG nova.compute.manager [None req-0baef903-48ec-4b17-bb95-9e4f98e5a4d9 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Going to confirm migration 6 {{(pid=69328) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1151.081272] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a85ceea-de53-45f4-82c6-4ae3dba6a55c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.092586] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41996dd6-8526-4e60-be45-f88198af862b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.125334] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b71a2ec-6879-4446-8e85-32d619eab5d1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.134827] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d8dca06-937c-4acc-8c8a-fa9ee0b9d275 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.149114] env[69328]: DEBUG nova.compute.provider_tree [None req-2e13d3e3-3ef9-44f8-aad8-2715bf73d241 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1151.184617] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: a7d4893f-31d4-449d-96d5-a2a1377d8454] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1151.231485] env[69328]: DEBUG nova.compute.manager [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 0c83f194-9346-4e24-a0ea-815d0b454ded] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1151.466137] env[69328]: DEBUG nova.compute.manager [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Starting instance... 
{{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1151.498064] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1151.502089] env[69328]: DEBUG oslo_concurrency.lockutils [None req-876c3230-90ec-4b39-a383-5719ae934358 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Acquiring lock "03f0adc8-d640-4248-be9d-ab4ba0cbe760" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1151.502367] env[69328]: DEBUG oslo_concurrency.lockutils [None req-876c3230-90ec-4b39-a383-5719ae934358 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Lock "03f0adc8-d640-4248-be9d-ab4ba0cbe760" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1151.502588] env[69328]: DEBUG oslo_concurrency.lockutils [None req-876c3230-90ec-4b39-a383-5719ae934358 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Acquiring lock "03f0adc8-d640-4248-be9d-ab4ba0cbe760-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1151.502779] env[69328]: DEBUG oslo_concurrency.lockutils [None req-876c3230-90ec-4b39-a383-5719ae934358 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Lock "03f0adc8-d640-4248-be9d-ab4ba0cbe760-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1151.502948] env[69328]: DEBUG oslo_concurrency.lockutils [None req-876c3230-90ec-4b39-a383-5719ae934358 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Lock "03f0adc8-d640-4248-be9d-ab4ba0cbe760-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1151.505828] env[69328]: INFO nova.compute.manager [None req-876c3230-90ec-4b39-a383-5719ae934358 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Terminating instance [ 1151.523494] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0baef903-48ec-4b17-bb95-9e4f98e5a4d9 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "refresh_cache-ee3609ea-0855-47c2-874c-349c80419781" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.523691] env[69328]: DEBUG oslo_concurrency.lockutils [None 
req-0baef903-48ec-4b17-bb95-9e4f98e5a4d9 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquired lock "refresh_cache-ee3609ea-0855-47c2-874c-349c80419781" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1151.523875] env[69328]: DEBUG nova.network.neutron [None req-0baef903-48ec-4b17-bb95-9e4f98e5a4d9 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1151.524076] env[69328]: DEBUG nova.objects.instance [None req-0baef903-48ec-4b17-bb95-9e4f98e5a4d9 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lazy-loading 'info_cache' on Instance uuid ee3609ea-0855-47c2-874c-349c80419781 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1151.653716] env[69328]: DEBUG nova.scheduler.client.report [None req-2e13d3e3-3ef9-44f8-aad8-2715bf73d241 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1151.688641] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 65fccb3f-5e0e-4140-be0a-5ba20f494d50] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1151.751763] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1152.000371] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1152.009331] env[69328]: DEBUG nova.compute.manager [None req-876c3230-90ec-4b39-a383-5719ae934358 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1152.010070] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-876c3230-90ec-4b39-a383-5719ae934358 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1152.010594] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff19ee7a-8676-4228-a4ef-2c2a637f5659 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.018864] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-876c3230-90ec-4b39-a383-5719ae934358 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1152.019715] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-381a471b-ab1e-497b-b254-a10a536dc14e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.029908] env[69328]: DEBUG oslo_vmware.api [None req-876c3230-90ec-4b39-a383-5719ae934358 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Waiting for the task: (returnval){ [ 1152.029908] env[69328]: value = "task-3274181" [ 1152.029908] env[69328]: _type = "Task" [ 1152.029908] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.038722] env[69328]: DEBUG oslo_vmware.api [None req-876c3230-90ec-4b39-a383-5719ae934358 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3274181, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.164011] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2e13d3e3-3ef9-44f8-aad8-2715bf73d241 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.870s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1152.166465] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.674s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1152.193736] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 9f6f8e97-cb21-4984-af08-a63ea4578eef] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1152.196536] env[69328]: INFO nova.scheduler.client.report [None req-2e13d3e3-3ef9-44f8-aad8-2715bf73d241 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Deleted allocations for instance fb2d04d8-cff6-414c-9d50-3ab61729546d [ 1152.540959] env[69328]: DEBUG oslo_vmware.api [None req-876c3230-90ec-4b39-a383-5719ae934358 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3274181, 'name': PowerOffVM_Task, 'duration_secs': 0.412174} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.541231] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-876c3230-90ec-4b39-a383-5719ae934358 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1152.541406] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-876c3230-90ec-4b39-a383-5719ae934358 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1152.541668] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-89eaf98e-7f52-4ea0-ab1a-a1e6dfe2770e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.614934] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-876c3230-90ec-4b39-a383-5719ae934358 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1152.615188] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-876c3230-90ec-4b39-a383-5719ae934358 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1152.615468] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-876c3230-90ec-4b39-a383-5719ae934358 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Deleting the datastore file [datastore2] 03f0adc8-d640-4248-be9d-ab4ba0cbe760 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1152.615772] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8940c466-b171-4b09-bbd3-d08a0b1beec0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.623352] env[69328]: DEBUG oslo_vmware.api [None req-876c3230-90ec-4b39-a383-5719ae934358 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Waiting for the task: (returnval){ [ 1152.623352] env[69328]: value = "task-3274183" [ 1152.623352] env[69328]: _type = "Task" [ 1152.623352] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.632954] env[69328]: DEBUG oslo_vmware.api [None req-876c3230-90ec-4b39-a383-5719ae934358 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3274183, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.676425] env[69328]: INFO nova.compute.claims [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1152.698785] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: b61436f5-0e8b-4da5-9459-cf9487dfd23f] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1152.717343] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2e13d3e3-3ef9-44f8-aad8-2715bf73d241 tempest-ServersV294TestFqdnHostnames-2085437736 tempest-ServersV294TestFqdnHostnames-2085437736-project-member] Lock "fb2d04d8-cff6-414c-9d50-3ab61729546d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.881s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1152.869393] env[69328]: DEBUG nova.network.neutron [None req-0baef903-48ec-4b17-bb95-9e4f98e5a4d9 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Updating instance_info_cache with network_info: [{"id": "ce79bad7-6bfd-4645-bc55-71dfc049411d", "address": "fa:16:3e:c6:38:a6", "network": {"id": "ce3bec03-01f9-4ac9-80e4-bfb944c0bb66", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-545997027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.234", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87581f423dc64e4fb9fe1d51ebc68597", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1768af3d-3317-4ef5-b484-0c2707d63de7", "external-id": "nsx-vlan-transportzone-706", "segmentation_id": 706, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce79bad7-6b", "ovs_interfaceid": "ce79bad7-6bfd-4645-bc55-71dfc049411d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1152.943546] env[69328]: DEBUG oslo_concurrency.lockutils [None req-32dc520c-06ee-46c3-af61-0b18a717beb0 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "interface-de8e6616-0460-4a6e-918c-a27818da96e2-571eed05-9f96-46fe-9592-59e38c00196c" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1152.943754] env[69328]: DEBUG oslo_concurrency.lockutils [None req-32dc520c-06ee-46c3-af61-0b18a717beb0 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock 
"interface-de8e6616-0460-4a6e-918c-a27818da96e2-571eed05-9f96-46fe-9592-59e38c00196c" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1152.944133] env[69328]: DEBUG nova.objects.instance [None req-32dc520c-06ee-46c3-af61-0b18a717beb0 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lazy-loading 'flavor' on Instance uuid de8e6616-0460-4a6e-918c-a27818da96e2 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1153.133577] env[69328]: DEBUG oslo_vmware.api [None req-876c3230-90ec-4b39-a383-5719ae934358 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3274183, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.181828} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.133834] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-876c3230-90ec-4b39-a383-5719ae934358 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1153.134034] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-876c3230-90ec-4b39-a383-5719ae934358 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1153.134216] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-876c3230-90ec-4b39-a383-5719ae934358 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1153.134419] env[69328]: INFO nova.compute.manager [None req-876c3230-90ec-4b39-a383-5719ae934358 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1153.134687] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-876c3230-90ec-4b39-a383-5719ae934358 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1153.134887] env[69328]: DEBUG nova.compute.manager [-] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1153.134982] env[69328]: DEBUG nova.network.neutron [-] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1153.183255] env[69328]: INFO nova.compute.resource_tracker [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Updating resource usage from migration c892a2f2-f997-40ad-a8ba-b86ecfe7a6fb [ 1153.201228] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 51a9c492-6f91-4186-b550-ef12284b8a84] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1153.372215] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0baef903-48ec-4b17-bb95-9e4f98e5a4d9 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Releasing lock "refresh_cache-ee3609ea-0855-47c2-874c-349c80419781" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1153.372215] env[69328]: DEBUG nova.objects.instance [None req-0baef903-48ec-4b17-bb95-9e4f98e5a4d9 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lazy-loading 'migration_context' on Instance uuid ee3609ea-0855-47c2-874c-349c80419781 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1153.466221] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e625daa-daf0-42d5-95cf-f631e263e013 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.474500] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b90f320-374d-4070-8784-9ce819f2ef0a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.515899] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59f86c2e-66d1-4d33-9501-c619b21c01a5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.526427] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76fae2b5-5e27-4e07-82b7-4fb7c0a8f90e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.541118] env[69328]: DEBUG nova.compute.provider_tree [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1153.627748] env[69328]: DEBUG nova.objects.instance [None req-32dc520c-06ee-46c3-af61-0b18a717beb0 tempest-AttachInterfacesTestJSON-1279775803 
tempest-AttachInterfacesTestJSON-1279775803-project-member] Lazy-loading 'pci_requests' on Instance uuid de8e6616-0460-4a6e-918c-a27818da96e2 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1153.707073] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 772ab9b3-23ac-46c6-acb1-af0b2726fd90] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1153.757830] env[69328]: DEBUG nova.compute.manager [req-40e9c727-9e38-4b34-827a-9ff95a83c3db req-571904ce-f691-45cf-86e0-4a34ab82f451 service nova] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Received event network-vif-deleted-372b585b-44c4-4862-adf0-76a6931af0cd {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1153.758036] env[69328]: INFO nova.compute.manager [req-40e9c727-9e38-4b34-827a-9ff95a83c3db req-571904ce-f691-45cf-86e0-4a34ab82f451 service nova] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Neutron deleted interface 372b585b-44c4-4862-adf0-76a6931af0cd; detaching it from the instance and deleting it from the info cache [ 1153.758221] env[69328]: DEBUG nova.network.neutron [req-40e9c727-9e38-4b34-827a-9ff95a83c3db req-571904ce-f691-45cf-86e0-4a34ab82f451 service nova] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1153.877281] env[69328]: DEBUG nova.objects.base [None req-0baef903-48ec-4b17-bb95-9e4f98e5a4d9 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=69328) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1153.878534] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-104600a3-3f45-41fa-ba1d-eb75db3ce6b6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.902022] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48823c3f-1236-4591-85fb-6bc4f4855b33 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.910444] env[69328]: DEBUG oslo_vmware.api [None req-0baef903-48ec-4b17-bb95-9e4f98e5a4d9 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 1153.910444] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ae500a-3b98-9c0d-50c6-2cddb18c4239" [ 1153.910444] env[69328]: _type = "Task" [ 1153.910444] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.920835] env[69328]: DEBUG oslo_vmware.api [None req-0baef903-48ec-4b17-bb95-9e4f98e5a4d9 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ae500a-3b98-9c0d-50c6-2cddb18c4239, 'name': SearchDatastore_Task, 'duration_secs': 0.0082} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.920835] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0baef903-48ec-4b17-bb95-9e4f98e5a4d9 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1154.044847] env[69328]: DEBUG nova.scheduler.client.report [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1154.133239] env[69328]: DEBUG nova.objects.base [None req-32dc520c-06ee-46c3-af61-0b18a717beb0 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=69328) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1154.133485] env[69328]: DEBUG nova.network.neutron [None req-32dc520c-06ee-46c3-af61-0b18a717beb0 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1154.181929] env[69328]: DEBUG nova.network.neutron [-] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1154.209964] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 7d28bfe2-a9a0-4e6b-a01c-6ab1f1e737ea] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1154.229703] env[69328]: DEBUG nova.policy [None req-32dc520c-06ee-46c3-af61-0b18a717beb0 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '69ca01fd1d0f42b0b05a5426da9753ad', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '30209bc93a4042488f15c73b7e4733d5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 1154.261208] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0889e733-92ff-4754-bed5-13d8c0bc8949 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.273063] env[69328]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aef0058f-4d6c-4402-8e4b-24849325ee2b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.309947] env[69328]: DEBUG nova.compute.manager [req-40e9c727-9e38-4b34-827a-9ff95a83c3db req-571904ce-f691-45cf-86e0-4a34ab82f451 service nova] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Detach interface failed, port_id=372b585b-44c4-4862-adf0-76a6931af0cd, reason: Instance 03f0adc8-d640-4248-be9d-ab4ba0cbe760 could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1154.550419] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.384s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1154.550685] env[69328]: INFO nova.compute.manager [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Migrating [ 1154.557267] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.806s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1154.558745] env[69328]: INFO nova.compute.claims [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 0c83f194-9346-4e24-a0ea-815d0b454ded] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1154.685811] env[69328]: INFO nova.compute.manager [-] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Took 1.55 seconds to deallocate network for instance. 
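Most of the DEBUG traffic in this trace comes from two library primitives: oslo.concurrency's named internal locks (the 'Acquiring lock ...' / 'acquired ... waited N.NNNs' / '"released" ... held N.NNNs' triplets emitted from lockutils.py:405/410/424) and oslo.vmware's task polling (the 'Waiting for the task ... to complete' block from api.py:397, the '... progress is N%' lines from _poll_task at api.py:434, and '... completed successfully' at api.py:444). The short Python sketch below shows how code typically drives those two primitives; it is an illustration assembled for this annotation, not Nova source, and the helper names, the vm_ref argument, and the pre-existing session object are assumptions.

# Illustrative sketch only -- not Nova source. It exercises the same
# lockutils and oslo.vmware calls whose DEBUG output appears in this log.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def update_usage_example():
    # While this body runs, the internal semaphore named 'compute_resources'
    # is held; the decorator's wrapper emits the Acquiring/acquired/
    # "released" DEBUG lines seen above for ResourceTracker methods.
    pass


def power_off_example(session, vm_ref):
    # Assumptions: 'session' is an already-created
    # oslo_vmware.api.VMwareAPISession and 'vm_ref' is a managed-object
    # reference to a VM; both are placeholders here.
    # invoke_api() issues the SOAP call (the 'Invoking
    # VirtualMachine.PowerOffVM_Task' lines), and wait_for_task() polls the
    # returned task, producing the 'Waiting for the task ... progress is
    # N% ... completed successfully' lines.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    return session.wait_for_task(task)


if __name__ == '__main__':
    import logging
    logging.basicConfig(level=logging.DEBUG)
    update_usage_example()  # needs only oslo.concurrency installed

Running just update_usage_example() with oslo.concurrency installed reproduces the acquire/release DEBUG lines; the power-off helper additionally needs a live vCenter session, which is why it is left as a sketch rather than a runnable demo.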
[ 1154.714232] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: a95d01cf-c26b-466c-a5b6-a7e43f0321fa] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1155.071831] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "refresh_cache-f1be93b2-08db-41fe-87c4-f4e5f964cfa4" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1155.072009] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquired lock "refresh_cache-f1be93b2-08db-41fe-87c4-f4e5f964cfa4" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1155.072208] env[69328]: DEBUG nova.network.neutron [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1155.192950] env[69328]: DEBUG oslo_concurrency.lockutils [None req-876c3230-90ec-4b39-a383-5719ae934358 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1155.218695] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 65e38a02-880b-46e2-8866-645a9fc17c7a] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1155.722354] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 96f604a9-e42c-4aa8-b5b5-edcb34901d94] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1155.848556] env[69328]: DEBUG nova.network.neutron [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Updating instance_info_cache with network_info: [{"id": "1018560a-13d7-4d01-8fc4-03d0b9beab90", "address": "fa:16:3e:33:ba:27", "network": {"id": "4031def8-0553-461f-9528-aa338b9d7b2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1834835647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f357b5a9494b4849a83aa934c5d4e26b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "357d2811-e990-4985-9f9e-b158d10d3699", "external-id": "nsx-vlan-transportzone-641", "segmentation_id": 641, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1018560a-13", "ovs_interfaceid": "1018560a-13d7-4d01-8fc4-03d0b9beab90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1155.880390] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-448efb78-9ee5-4889-a5e5-5def4be34b33 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.888764] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-014d4d9a-9103-45a9-abbc-40188aa32dc9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.924069] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73bdd7d6-9d50-4b4f-90ce-fc2348c5a259 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.930912] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23d008ed-3827-4551-858b-dbf39b17e670 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.945767] env[69328]: DEBUG nova.compute.provider_tree [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1155.975021] env[69328]: DEBUG nova.compute.manager [req-68c65eaf-4575-424d-bb8b-8ffaebb0765c req-40901764-5428-4b71-9e0c-6649ccbced1f service nova] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Received event network-vif-plugged-571eed05-9f96-46fe-9592-59e38c00196c {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1155.975241] env[69328]: DEBUG oslo_concurrency.lockutils [req-68c65eaf-4575-424d-bb8b-8ffaebb0765c req-40901764-5428-4b71-9e0c-6649ccbced1f service nova] Acquiring lock "de8e6616-0460-4a6e-918c-a27818da96e2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1155.975468] env[69328]: DEBUG oslo_concurrency.lockutils [req-68c65eaf-4575-424d-bb8b-8ffaebb0765c req-40901764-5428-4b71-9e0c-6649ccbced1f service nova] Lock "de8e6616-0460-4a6e-918c-a27818da96e2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1155.975660] env[69328]: DEBUG oslo_concurrency.lockutils [req-68c65eaf-4575-424d-bb8b-8ffaebb0765c req-40901764-5428-4b71-9e0c-6649ccbced1f service nova] Lock "de8e6616-0460-4a6e-918c-a27818da96e2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 
1155.975781] env[69328]: DEBUG nova.compute.manager [req-68c65eaf-4575-424d-bb8b-8ffaebb0765c req-40901764-5428-4b71-9e0c-6649ccbced1f service nova] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] No waiting events found dispatching network-vif-plugged-571eed05-9f96-46fe-9592-59e38c00196c {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1155.976126] env[69328]: WARNING nova.compute.manager [req-68c65eaf-4575-424d-bb8b-8ffaebb0765c req-40901764-5428-4b71-9e0c-6649ccbced1f service nova] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Received unexpected event network-vif-plugged-571eed05-9f96-46fe-9592-59e38c00196c for instance with vm_state active and task_state None. [ 1156.069905] env[69328]: DEBUG nova.network.neutron [None req-32dc520c-06ee-46c3-af61-0b18a717beb0 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Successfully updated port: 571eed05-9f96-46fe-9592-59e38c00196c {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1156.227623] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 9ad2b2e3-460a-403e-bfc7-f46648c93849] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1156.352537] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Releasing lock "refresh_cache-f1be93b2-08db-41fe-87c4-f4e5f964cfa4" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1156.448764] env[69328]: DEBUG nova.scheduler.client.report [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1156.573316] env[69328]: DEBUG oslo_concurrency.lockutils [None req-32dc520c-06ee-46c3-af61-0b18a717beb0 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "refresh_cache-de8e6616-0460-4a6e-918c-a27818da96e2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1156.573580] env[69328]: DEBUG oslo_concurrency.lockutils [None req-32dc520c-06ee-46c3-af61-0b18a717beb0 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquired lock "refresh_cache-de8e6616-0460-4a6e-918c-a27818da96e2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1156.573819] env[69328]: DEBUG nova.network.neutron [None req-32dc520c-06ee-46c3-af61-0b18a717beb0 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Building network info 
cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1156.731417] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 82e27131-b401-4885-83fb-825e5c8e2444] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1156.748426] env[69328]: DEBUG oslo_concurrency.lockutils [None req-75533f09-8339-447d-b801-42818e769a94 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquiring lock "52c87371-4142-40d6-ac68-804aabd9f823" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1156.748426] env[69328]: DEBUG oslo_concurrency.lockutils [None req-75533f09-8339-447d-b801-42818e769a94 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lock "52c87371-4142-40d6-ac68-804aabd9f823" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1156.953875] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.397s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1156.954510] env[69328]: DEBUG nova.compute.manager [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 0c83f194-9346-4e24-a0ea-815d0b454ded] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1156.959675] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.959s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1156.965975] env[69328]: INFO nova.compute.claims [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1157.112707] env[69328]: WARNING nova.network.neutron [None req-32dc520c-06ee-46c3-af61-0b18a717beb0 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] aed15283-4a79-4e99-8b6c-49cf754138de already exists in list: networks containing: ['aed15283-4a79-4e99-8b6c-49cf754138de']. 
ignoring it [ 1157.239483] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 0a485411-3206-4674-90e4-58df4a8b755a] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1157.251148] env[69328]: INFO nova.compute.manager [None req-75533f09-8339-447d-b801-42818e769a94 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Detaching volume 3aa45c90-619e-4b44-982d-0c10542c37fe [ 1157.301384] env[69328]: INFO nova.virt.block_device [None req-75533f09-8339-447d-b801-42818e769a94 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Attempting to driver detach volume 3aa45c90-619e-4b44-982d-0c10542c37fe from mountpoint /dev/sdb [ 1157.301632] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-75533f09-8339-447d-b801-42818e769a94 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Volume detach. Driver type: vmdk {{(pid=69328) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1157.301823] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-75533f09-8339-447d-b801-42818e769a94 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653936', 'volume_id': '3aa45c90-619e-4b44-982d-0c10542c37fe', 'name': 'volume-3aa45c90-619e-4b44-982d-0c10542c37fe', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '52c87371-4142-40d6-ac68-804aabd9f823', 'attached_at': '', 'detached_at': '', 'volume_id': '3aa45c90-619e-4b44-982d-0c10542c37fe', 'serial': '3aa45c90-619e-4b44-982d-0c10542c37fe'} {{(pid=69328) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1157.302702] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b61c2f20-98fc-46f1-8b79-b5ba5444e05e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.327919] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f59a58d5-3719-4816-a2aa-46621b0535a5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.337257] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5e3015c-0565-46c1-9607-fb1ac388df8f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.368163] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f82d5448-7176-4711-8160-6f7eed1810bc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.383168] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-75533f09-8339-447d-b801-42818e769a94 tempest-ServerRescueNegativeTestJSON-2009991415 
tempest-ServerRescueNegativeTestJSON-2009991415-project-member] The volume has not been displaced from its original location: [datastore1] volume-3aa45c90-619e-4b44-982d-0c10542c37fe/volume-3aa45c90-619e-4b44-982d-0c10542c37fe.vmdk. No consolidation needed. {{(pid=69328) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1157.389149] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-75533f09-8339-447d-b801-42818e769a94 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Reconfiguring VM instance instance-00000056 to detach disk 2001 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1157.390423] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d5fb966e-3bb2-416b-b418-7ceab9ba06c8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.412344] env[69328]: DEBUG oslo_vmware.api [None req-75533f09-8339-447d-b801-42818e769a94 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 1157.412344] env[69328]: value = "task-3274184" [ 1157.412344] env[69328]: _type = "Task" [ 1157.412344] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.422256] env[69328]: DEBUG oslo_vmware.api [None req-75533f09-8339-447d-b801-42818e769a94 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274184, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.467951] env[69328]: DEBUG nova.compute.utils [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1157.472669] env[69328]: DEBUG nova.network.neutron [None req-32dc520c-06ee-46c3-af61-0b18a717beb0 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Updating instance_info_cache with network_info: [{"id": "13436ecc-0cb3-4c13-bf18-f81195196ffd", "address": "fa:16:3e:2e:1b:14", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13436ecc-0c", "ovs_interfaceid": "13436ecc-0cb3-4c13-bf18-f81195196ffd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "571eed05-9f96-46fe-9592-59e38c00196c", "address": "fa:16:3e:dc:0e:cb", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap571eed05-9f", "ovs_interfaceid": "571eed05-9f96-46fe-9592-59e38c00196c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1157.477668] env[69328]: DEBUG nova.compute.manager [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 0c83f194-9346-4e24-a0ea-815d0b454ded] Not allocating networking since 'none' was specified. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1157.745884] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: b21ff3c9-d53a-4065-a271-682c2f1b895d] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1157.866894] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3737ed66-5f68-44b4-89b2-e311ab4a91c9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.887308] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Updating instance 'f1be93b2-08db-41fe-87c4-f4e5f964cfa4' progress to 0 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1157.922742] env[69328]: DEBUG oslo_vmware.api [None req-75533f09-8339-447d-b801-42818e769a94 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274184, 'name': ReconfigVM_Task, 'duration_secs': 0.252732} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.923047] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-75533f09-8339-447d-b801-42818e769a94 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Reconfigured VM instance instance-00000056 to detach disk 2001 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1157.927891] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c5314747-ab04-412c-bb72-1f7c24bda4ee {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.943970] env[69328]: DEBUG oslo_vmware.api [None req-75533f09-8339-447d-b801-42818e769a94 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 1157.943970] env[69328]: value = "task-3274185" [ 1157.943970] env[69328]: _type = "Task" [ 1157.943970] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.955100] env[69328]: DEBUG oslo_vmware.api [None req-75533f09-8339-447d-b801-42818e769a94 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274185, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.979048] env[69328]: DEBUG oslo_concurrency.lockutils [None req-32dc520c-06ee-46c3-af61-0b18a717beb0 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Releasing lock "refresh_cache-de8e6616-0460-4a6e-918c-a27818da96e2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1157.979420] env[69328]: DEBUG oslo_concurrency.lockutils [None req-32dc520c-06ee-46c3-af61-0b18a717beb0 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "de8e6616-0460-4a6e-918c-a27818da96e2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1157.979583] env[69328]: DEBUG oslo_concurrency.lockutils [None req-32dc520c-06ee-46c3-af61-0b18a717beb0 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquired lock "de8e6616-0460-4a6e-918c-a27818da96e2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1157.980059] env[69328]: DEBUG nova.compute.manager [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 0c83f194-9346-4e24-a0ea-815d0b454ded] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1157.983039] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ddc6c84-750f-4d4e-a2b0-0a45874a385a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.005685] env[69328]: DEBUG nova.virt.hardware [None req-32dc520c-06ee-46c3-af61-0b18a717beb0 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1158.005917] env[69328]: DEBUG nova.virt.hardware [None req-32dc520c-06ee-46c3-af61-0b18a717beb0 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1158.006083] env[69328]: DEBUG nova.virt.hardware [None req-32dc520c-06ee-46c3-af61-0b18a717beb0 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1158.006285] env[69328]: DEBUG nova.virt.hardware [None req-32dc520c-06ee-46c3-af61-0b18a717beb0 tempest-AttachInterfacesTestJSON-1279775803 
tempest-AttachInterfacesTestJSON-1279775803-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1158.006400] env[69328]: DEBUG nova.virt.hardware [None req-32dc520c-06ee-46c3-af61-0b18a717beb0 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1158.006539] env[69328]: DEBUG nova.virt.hardware [None req-32dc520c-06ee-46c3-af61-0b18a717beb0 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1158.006738] env[69328]: DEBUG nova.virt.hardware [None req-32dc520c-06ee-46c3-af61-0b18a717beb0 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1158.008033] env[69328]: DEBUG nova.virt.hardware [None req-32dc520c-06ee-46c3-af61-0b18a717beb0 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1158.008274] env[69328]: DEBUG nova.virt.hardware [None req-32dc520c-06ee-46c3-af61-0b18a717beb0 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1158.008468] env[69328]: DEBUG nova.virt.hardware [None req-32dc520c-06ee-46c3-af61-0b18a717beb0 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1158.008699] env[69328]: DEBUG nova.virt.hardware [None req-32dc520c-06ee-46c3-af61-0b18a717beb0 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1158.015256] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-32dc520c-06ee-46c3-af61-0b18a717beb0 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Reconfiguring VM to attach interface {{(pid=69328) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1158.019695] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a3f5e87-0666-4116-998e-9a9d2d92bbb5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.034988] env[69328]: DEBUG nova.compute.manager [req-75cddd17-cce0-46a9-a453-4f0a87dbab35 req-50cd83d7-7a1d-46db-bbaa-5c18fd9a44cc service nova] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Received event network-changed-571eed05-9f96-46fe-9592-59e38c00196c 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1158.035197] env[69328]: DEBUG nova.compute.manager [req-75cddd17-cce0-46a9-a453-4f0a87dbab35 req-50cd83d7-7a1d-46db-bbaa-5c18fd9a44cc service nova] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Refreshing instance network info cache due to event network-changed-571eed05-9f96-46fe-9592-59e38c00196c. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1158.035430] env[69328]: DEBUG oslo_concurrency.lockutils [req-75cddd17-cce0-46a9-a453-4f0a87dbab35 req-50cd83d7-7a1d-46db-bbaa-5c18fd9a44cc service nova] Acquiring lock "refresh_cache-de8e6616-0460-4a6e-918c-a27818da96e2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1158.035595] env[69328]: DEBUG oslo_concurrency.lockutils [req-75cddd17-cce0-46a9-a453-4f0a87dbab35 req-50cd83d7-7a1d-46db-bbaa-5c18fd9a44cc service nova] Acquired lock "refresh_cache-de8e6616-0460-4a6e-918c-a27818da96e2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1158.035758] env[69328]: DEBUG nova.network.neutron [req-75cddd17-cce0-46a9-a453-4f0a87dbab35 req-50cd83d7-7a1d-46db-bbaa-5c18fd9a44cc service nova] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Refreshing network info cache for port 571eed05-9f96-46fe-9592-59e38c00196c {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1158.043415] env[69328]: DEBUG oslo_vmware.api [None req-32dc520c-06ee-46c3-af61-0b18a717beb0 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 1158.043415] env[69328]: value = "task-3274186" [ 1158.043415] env[69328]: _type = "Task" [ 1158.043415] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.055770] env[69328]: DEBUG oslo_vmware.api [None req-32dc520c-06ee-46c3-af61-0b18a717beb0 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274186, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.248916] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 76210566-12d7-4f6a-afa1-6329e87e0f85] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1158.273538] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aa96da8-7209-4dec-b4b7-e1420911bd08 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.282520] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c451409c-69ee-4e7a-bb68-955742dd6968 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.316874] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65f77e52-b1d6-4dde-bdcb-874edb09ab03 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.324796] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-521b56c5-9050-4450-8409-20b138f9b8b5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.338118] env[69328]: DEBUG nova.compute.provider_tree [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1158.394445] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1158.394755] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b6938974-cea1-4789-8cd5-41d11c88cea6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.401995] env[69328]: DEBUG oslo_vmware.api [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1158.401995] env[69328]: value = "task-3274187" [ 1158.401995] env[69328]: _type = "Task" [ 1158.401995] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.410657] env[69328]: DEBUG oslo_vmware.api [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274187, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.453879] env[69328]: DEBUG oslo_vmware.api [None req-75533f09-8339-447d-b801-42818e769a94 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274185, 'name': ReconfigVM_Task, 'duration_secs': 0.140817} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.454235] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-75533f09-8339-447d-b801-42818e769a94 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653936', 'volume_id': '3aa45c90-619e-4b44-982d-0c10542c37fe', 'name': 'volume-3aa45c90-619e-4b44-982d-0c10542c37fe', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '52c87371-4142-40d6-ac68-804aabd9f823', 'attached_at': '', 'detached_at': '', 'volume_id': '3aa45c90-619e-4b44-982d-0c10542c37fe', 'serial': '3aa45c90-619e-4b44-982d-0c10542c37fe'} {{(pid=69328) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1158.562991] env[69328]: DEBUG oslo_vmware.api [None req-32dc520c-06ee-46c3-af61-0b18a717beb0 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274186, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.584387] env[69328]: DEBUG oslo_concurrency.lockutils [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "0cf68559-5f07-4006-9f7f-59027e31635d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1158.584667] env[69328]: DEBUG oslo_concurrency.lockutils [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "0cf68559-5f07-4006-9f7f-59027e31635d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1158.753585] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 1f568ba1-8591-499b-b1ee-da16e26f81fc] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1158.821320] env[69328]: DEBUG nova.network.neutron [req-75cddd17-cce0-46a9-a453-4f0a87dbab35 req-50cd83d7-7a1d-46db-bbaa-5c18fd9a44cc service nova] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Updated VIF entry in instance network info cache for port 571eed05-9f96-46fe-9592-59e38c00196c. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1158.821769] env[69328]: DEBUG nova.network.neutron [req-75cddd17-cce0-46a9-a453-4f0a87dbab35 req-50cd83d7-7a1d-46db-bbaa-5c18fd9a44cc service nova] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Updating instance_info_cache with network_info: [{"id": "13436ecc-0cb3-4c13-bf18-f81195196ffd", "address": "fa:16:3e:2e:1b:14", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13436ecc-0c", "ovs_interfaceid": "13436ecc-0cb3-4c13-bf18-f81195196ffd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "571eed05-9f96-46fe-9592-59e38c00196c", "address": "fa:16:3e:dc:0e:cb", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap571eed05-9f", "ovs_interfaceid": "571eed05-9f96-46fe-9592-59e38c00196c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1158.840546] env[69328]: DEBUG nova.scheduler.client.report [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1158.912637] env[69328]: DEBUG oslo_vmware.api [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274187, 'name': PowerOffVM_Task, 'duration_secs': 0.429698} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.912897] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1158.913094] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Updating instance 'f1be93b2-08db-41fe-87c4-f4e5f964cfa4' progress to 17 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1158.999017] env[69328]: DEBUG nova.compute.manager [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 0c83f194-9346-4e24-a0ea-815d0b454ded] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1159.010441] env[69328]: DEBUG nova.objects.instance [None req-75533f09-8339-447d-b801-42818e769a94 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lazy-loading 'flavor' on Instance uuid 52c87371-4142-40d6-ac68-804aabd9f823 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1159.036752] env[69328]: DEBUG nova.virt.hardware [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1159.036988] env[69328]: DEBUG nova.virt.hardware [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1159.037163] env[69328]: DEBUG nova.virt.hardware [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Image 
limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1159.037345] env[69328]: DEBUG nova.virt.hardware [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1159.037492] env[69328]: DEBUG nova.virt.hardware [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1159.037637] env[69328]: DEBUG nova.virt.hardware [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1159.037844] env[69328]: DEBUG nova.virt.hardware [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1159.038008] env[69328]: DEBUG nova.virt.hardware [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1159.038189] env[69328]: DEBUG nova.virt.hardware [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1159.038348] env[69328]: DEBUG nova.virt.hardware [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1159.038517] env[69328]: DEBUG nova.virt.hardware [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1159.039579] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9be93d4e-ca16-420d-9bb9-7ff94c2ed681 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.047793] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a56aab16-7ae5-4e97-b995-7871fe0967f1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.065013] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 0c83f194-9346-4e24-a0ea-815d0b454ded] Instance 
VIF info [] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1159.074252] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Creating folder: Project (63b48013ecfe4413bf7821e7f8a92d59). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1159.077548] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-36570ba3-3a7f-41a3-9d3b-e8cb5fa1e487 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.079135] env[69328]: DEBUG oslo_vmware.api [None req-32dc520c-06ee-46c3-af61-0b18a717beb0 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274186, 'name': ReconfigVM_Task, 'duration_secs': 0.631036} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.079611] env[69328]: DEBUG oslo_concurrency.lockutils [None req-32dc520c-06ee-46c3-af61-0b18a717beb0 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Releasing lock "de8e6616-0460-4a6e-918c-a27818da96e2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1159.079817] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-32dc520c-06ee-46c3-af61-0b18a717beb0 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Reconfigured VM to attach interface {{(pid=69328) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1159.087019] env[69328]: DEBUG nova.compute.manager [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1159.093035] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Created folder: Project (63b48013ecfe4413bf7821e7f8a92d59) in parent group-v653649. [ 1159.093035] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Creating folder: Instances. Parent ref: group-v653957. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1159.093035] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-50d7330d-e720-412e-9b8a-4f2d592874ae {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.100918] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Created folder: Instances in parent group-v653957. 
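Editor's note: the DEBUG lines above and below repeat one oslo.vmware pattern over and over: an "Invoking <ManagedObject>.<Method> with opID=oslo.vmware-..." SOAP round trip that returns a Task managed object, followed by "Waiting for the task: ... to complete" and periodic "Task: {...} progress is N%" polling. The Python sketch below is illustrative only; it is not the Nova code that emitted these logs, and the vCenter host and credentials are placeholder assumptions.

# Minimal sketch of the oslo.vmware session/task pattern visible in these logs.
# Host and credentials are placeholders (assumptions), not values from the log.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

# In Nova these values come from the [vmware] section of nova.conf.
session = vmware_api.VMwareAPISession(
    'vc.example.test',                # placeholder vCenter host
    'administrator@vsphere.local',    # placeholder username
    'secret',                         # placeholder password
    api_retry_count=10,               # retries on transient API faults
    task_poll_interval=0.5,           # seconds between the "progress is N%" polls
)

# Each "Invoking X.Y with opID=oslo.vmware-..." line corresponds to one
# invoke_api() call routed through the suds SOAP client.
vm_ref = vim_util.get_moref('vm-653936', 'VirtualMachine')  # moref format seen above
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)

# wait_for_task() polls TaskInfo until success or error, which produces the
# "Task: {'id': task-..., 'name': ...} progress is N%" DEBUG lines.
task_info = session.wait_for_task(task)
print(task_info.state)

The same invoke/wait cycle underlies the ReconfigVM_Task, CreateVM_Task and CreateFolder calls logged in this section; only the method name and arguments change.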
[ 1159.101331] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1159.101578] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0c83f194-9346-4e24-a0ea-815d0b454ded] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1159.101779] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c08f0437-4d79-481d-a06d-3857e046af03 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.118417] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1159.118417] env[69328]: value = "task-3274190" [ 1159.118417] env[69328]: _type = "Task" [ 1159.118417] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.125931] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274190, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.257727] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 73d5b248-3c3e-4e38-8d9c-1f9bfdb38494] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1159.325087] env[69328]: DEBUG oslo_concurrency.lockutils [req-75cddd17-cce0-46a9-a453-4f0a87dbab35 req-50cd83d7-7a1d-46db-bbaa-5c18fd9a44cc service nova] Releasing lock "refresh_cache-de8e6616-0460-4a6e-918c-a27818da96e2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1159.345291] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.386s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1159.345838] env[69328]: DEBUG nova.compute.manager [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1159.348520] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0baef903-48ec-4b17-bb95-9e4f98e5a4d9 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 5.429s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1159.421854] env[69328]: DEBUG nova.virt.hardware [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:34:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1159.422146] env[69328]: DEBUG nova.virt.hardware [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1159.422303] env[69328]: DEBUG nova.virt.hardware [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1159.422520] env[69328]: DEBUG nova.virt.hardware [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1159.422679] env[69328]: DEBUG nova.virt.hardware [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1159.422844] env[69328]: DEBUG nova.virt.hardware [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1159.423065] env[69328]: DEBUG nova.virt.hardware [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1159.423232] env[69328]: DEBUG nova.virt.hardware [None req-5e699919-439f-433a-a89e-cec25e1f8cad 
tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1159.423414] env[69328]: DEBUG nova.virt.hardware [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1159.423598] env[69328]: DEBUG nova.virt.hardware [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1159.423755] env[69328]: DEBUG nova.virt.hardware [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1159.428874] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3bc32a3e-6bd8-45fb-91c9-7b1544834322 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.444819] env[69328]: DEBUG oslo_vmware.api [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1159.444819] env[69328]: value = "task-3274191" [ 1159.444819] env[69328]: _type = "Task" [ 1159.444819] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.452497] env[69328]: DEBUG oslo_vmware.api [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274191, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.584152] env[69328]: DEBUG oslo_concurrency.lockutils [None req-32dc520c-06ee-46c3-af61-0b18a717beb0 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "interface-de8e6616-0460-4a6e-918c-a27818da96e2-571eed05-9f96-46fe-9592-59e38c00196c" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.640s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1159.610473] env[69328]: DEBUG oslo_concurrency.lockutils [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1159.628400] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274190, 'name': CreateVM_Task, 'duration_secs': 0.315804} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.628595] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0c83f194-9346-4e24-a0ea-815d0b454ded] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1159.629026] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1159.629198] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1159.629546] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1159.629810] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3503cae9-d60b-482e-8ca6-c24410a5de89 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.634257] env[69328]: DEBUG oslo_vmware.api [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Waiting for the task: (returnval){ [ 1159.634257] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5285ea32-4277-8307-c5f2-9f0c264ee5a5" [ 1159.634257] env[69328]: _type = "Task" [ 1159.634257] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.642232] env[69328]: DEBUG oslo_vmware.api [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5285ea32-4277-8307-c5f2-9f0c264ee5a5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.761030] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 20f750d7-1914-49bb-802f-464a30ffcf3a] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1159.852503] env[69328]: DEBUG nova.compute.utils [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1159.859026] env[69328]: DEBUG nova.compute.manager [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Not allocating networking since 'none' was specified. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1159.956872] env[69328]: DEBUG oslo_vmware.api [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274191, 'name': ReconfigVM_Task, 'duration_secs': 0.15012} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.957338] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Updating instance 'f1be93b2-08db-41fe-87c4-f4e5f964cfa4' progress to 33 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1160.020461] env[69328]: DEBUG oslo_concurrency.lockutils [None req-75533f09-8339-447d-b801-42818e769a94 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lock "52c87371-4142-40d6-ac68-804aabd9f823" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.273s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1160.103725] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bba0828e-ab66-498b-9efa-dadb50f9a018 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.112552] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54f797d8-6acf-46a7-9523-bac49a7a9838 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.146438] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d279c93c-7265-495c-8f5b-c12cfb72a14b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.155896] env[69328]: DEBUG oslo_vmware.api [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5285ea32-4277-8307-c5f2-9f0c264ee5a5, 'name': SearchDatastore_Task, 'duration_secs': 0.039766} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.157100] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3320e2ea-c47a-4a92-a415-b58b6f7f9049 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.160737] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1160.160969] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 0c83f194-9346-4e24-a0ea-815d0b454ded] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1160.161215] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1160.161364] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1160.161542] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1160.161771] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2523e2c6-b962-4a85-b047-3dacd846ca45 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.174506] env[69328]: DEBUG nova.compute.provider_tree [None req-0baef903-48ec-4b17-bb95-9e4f98e5a4d9 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1160.176592] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1160.176773] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Folder [datastore1] 
devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1160.177659] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b81fe3b-56f9-4ee1-9789-28cfd2549af7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.182288] env[69328]: DEBUG oslo_vmware.api [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Waiting for the task: (returnval){ [ 1160.182288] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b377a7-2db3-2cc0-abdb-e232109a131c" [ 1160.182288] env[69328]: _type = "Task" [ 1160.182288] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.189693] env[69328]: DEBUG oslo_vmware.api [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b377a7-2db3-2cc0-abdb-e232109a131c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.264093] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 3ba646e8-a5c8-4917-a1c4-32b37affb598] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1160.359466] env[69328]: DEBUG nova.compute.manager [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Start building block device mappings for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1160.465371] env[69328]: DEBUG nova.virt.hardware [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1160.465626] env[69328]: DEBUG nova.virt.hardware [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1160.465784] env[69328]: DEBUG nova.virt.hardware [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1160.465978] env[69328]: DEBUG nova.virt.hardware [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1160.466131] env[69328]: DEBUG nova.virt.hardware [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1160.466281] env[69328]: DEBUG nova.virt.hardware [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1160.466485] env[69328]: DEBUG nova.virt.hardware [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1160.466643] env[69328]: DEBUG nova.virt.hardware [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1160.467521] env[69328]: DEBUG nova.virt.hardware [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Got 1 possible topologies {{(pid=69328) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1160.467521] env[69328]: DEBUG nova.virt.hardware [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1160.467521] env[69328]: DEBUG nova.virt.hardware [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1160.472819] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Reconfiguring VM instance instance-00000044 to detach disk 2000 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1160.473125] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ac63a032-a15b-4943-9de6-752dcd99777d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.492503] env[69328]: DEBUG oslo_vmware.api [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1160.492503] env[69328]: value = "task-3274192" [ 1160.492503] env[69328]: _type = "Task" [ 1160.492503] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.502890] env[69328]: DEBUG oslo_vmware.api [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274192, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.507448] env[69328]: DEBUG oslo_concurrency.lockutils [None req-670828b7-67a2-441d-8336-730d569ae3a3 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquiring lock "52c87371-4142-40d6-ac68-804aabd9f823" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1160.507685] env[69328]: DEBUG oslo_concurrency.lockutils [None req-670828b7-67a2-441d-8336-730d569ae3a3 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lock "52c87371-4142-40d6-ac68-804aabd9f823" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1160.507892] env[69328]: DEBUG oslo_concurrency.lockutils [None req-670828b7-67a2-441d-8336-730d569ae3a3 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquiring lock "52c87371-4142-40d6-ac68-804aabd9f823-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1160.508247] env[69328]: DEBUG oslo_concurrency.lockutils [None req-670828b7-67a2-441d-8336-730d569ae3a3 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lock "52c87371-4142-40d6-ac68-804aabd9f823-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1160.508247] env[69328]: DEBUG oslo_concurrency.lockutils [None req-670828b7-67a2-441d-8336-730d569ae3a3 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lock "52c87371-4142-40d6-ac68-804aabd9f823-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1160.511883] env[69328]: INFO nova.compute.manager [None req-670828b7-67a2-441d-8336-730d569ae3a3 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Terminating instance [ 1160.679038] env[69328]: DEBUG nova.scheduler.client.report [None req-0baef903-48ec-4b17-bb95-9e4f98e5a4d9 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1160.694383] env[69328]: DEBUG 
oslo_vmware.api [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b377a7-2db3-2cc0-abdb-e232109a131c, 'name': SearchDatastore_Task, 'duration_secs': 0.008389} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.695650] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db0cc692-e254-49eb-b97f-a9c8e840998a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.701929] env[69328]: DEBUG oslo_vmware.api [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Waiting for the task: (returnval){ [ 1160.701929] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c7dca5-ae63-e0e7-b15c-3908f3ed7455" [ 1160.701929] env[69328]: _type = "Task" [ 1160.701929] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.713728] env[69328]: DEBUG oslo_vmware.api [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c7dca5-ae63-e0e7-b15c-3908f3ed7455, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.766956] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: a0952fdf-5570-4112-bc4d-e9f9cee1599c] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1161.002759] env[69328]: DEBUG oslo_vmware.api [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274192, 'name': ReconfigVM_Task, 'duration_secs': 0.232857} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.003048] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Reconfigured VM instance instance-00000044 to detach disk 2000 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1161.003837] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33957c61-be19-4d53-b492-80f841e07e7b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.021273] env[69328]: DEBUG nova.compute.manager [None req-670828b7-67a2-441d-8336-730d569ae3a3 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1161.021494] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-670828b7-67a2-441d-8336-730d569ae3a3 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1161.029056] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Reconfiguring VM instance instance-00000044 to attach disk [datastore2] f1be93b2-08db-41fe-87c4-f4e5f964cfa4/f1be93b2-08db-41fe-87c4-f4e5f964cfa4.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1161.029805] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7c8b092-592b-48c9-9012-d4b399671400 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.032736] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b1fd927-7c4a-4dcf-9080-90a08c1b7da2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.051966] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-670828b7-67a2-441d-8336-730d569ae3a3 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1161.053210] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-48d67f7c-decf-49c5-8f3e-62c835df4129 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.054858] env[69328]: DEBUG oslo_vmware.api [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1161.054858] env[69328]: value = "task-3274193" [ 1161.054858] env[69328]: _type = "Task" [ 1161.054858] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.060163] env[69328]: DEBUG oslo_vmware.api [None req-670828b7-67a2-441d-8336-730d569ae3a3 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 1161.060163] env[69328]: value = "task-3274194" [ 1161.060163] env[69328]: _type = "Task" [ 1161.060163] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.063297] env[69328]: DEBUG oslo_vmware.api [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274193, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.071719] env[69328]: DEBUG oslo_vmware.api [None req-670828b7-67a2-441d-8336-730d569ae3a3 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274194, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.163471] env[69328]: DEBUG oslo_concurrency.lockutils [None req-019042f2-f809-46a7-bb40-396a5cc0cfe7 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "interface-de8e6616-0460-4a6e-918c-a27818da96e2-571eed05-9f96-46fe-9592-59e38c00196c" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1161.163846] env[69328]: DEBUG oslo_concurrency.lockutils [None req-019042f2-f809-46a7-bb40-396a5cc0cfe7 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "interface-de8e6616-0460-4a6e-918c-a27818da96e2-571eed05-9f96-46fe-9592-59e38c00196c" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1161.224290] env[69328]: DEBUG oslo_vmware.api [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c7dca5-ae63-e0e7-b15c-3908f3ed7455, 'name': SearchDatastore_Task, 'duration_secs': 0.010556} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.224740] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1161.225200] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 0c83f194-9346-4e24-a0ea-815d0b454ded/0c83f194-9346-4e24-a0ea-815d0b454ded.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1161.226147] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bf09a834-cc7d-4471-b838-776cc71fde1d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.236496] env[69328]: DEBUG oslo_vmware.api [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Waiting for the task: (returnval){ [ 1161.236496] env[69328]: value = "task-3274195" [ 1161.236496] env[69328]: _type = "Task" [ 1161.236496] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.247467] env[69328]: DEBUG oslo_vmware.api [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274195, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.270474] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: d017d08e-5f9e-4d05-8914-3320d4c87c9b] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1161.369213] env[69328]: DEBUG nova.compute.manager [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1161.392489] env[69328]: DEBUG nova.virt.hardware [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1161.392670] env[69328]: DEBUG nova.virt.hardware [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1161.392718] env[69328]: DEBUG nova.virt.hardware [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1161.392913] env[69328]: DEBUG nova.virt.hardware [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1161.393075] env[69328]: DEBUG nova.virt.hardware [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1161.393242] env[69328]: DEBUG nova.virt.hardware [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Chose 
sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1161.393463] env[69328]: DEBUG nova.virt.hardware [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1161.393639] env[69328]: DEBUG nova.virt.hardware [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1161.393822] env[69328]: DEBUG nova.virt.hardware [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1161.393992] env[69328]: DEBUG nova.virt.hardware [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1161.394195] env[69328]: DEBUG nova.virt.hardware [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1161.395156] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1191c5b-d25b-46a0-b38a-e8c9075866f0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.403923] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c6d76c0-fe02-4b38-b5a5-90c43caf1791 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.419757] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Instance VIF info [] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1161.425780] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1161.426059] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1161.426278] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e5a6c5a2-20f3-4052-959d-01a02d5aae1b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.446482] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1161.446482] env[69328]: value = "task-3274196" [ 1161.446482] env[69328]: _type = "Task" [ 1161.446482] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.457799] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274196, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.568841] env[69328]: DEBUG oslo_vmware.api [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274193, 'name': ReconfigVM_Task, 'duration_secs': 0.356196} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.572287] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Reconfigured VM instance instance-00000044 to attach disk [datastore2] f1be93b2-08db-41fe-87c4-f4e5f964cfa4/f1be93b2-08db-41fe-87c4-f4e5f964cfa4.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1161.572657] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Updating instance 'f1be93b2-08db-41fe-87c4-f4e5f964cfa4' progress to 50 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1161.584634] env[69328]: DEBUG oslo_vmware.api [None req-670828b7-67a2-441d-8336-730d569ae3a3 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274194, 'name': PowerOffVM_Task, 'duration_secs': 0.309004} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.584913] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-670828b7-67a2-441d-8336-730d569ae3a3 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1161.585117] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-670828b7-67a2-441d-8336-730d569ae3a3 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1161.585403] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7e2c838b-82ff-4144-9006-34e5b00a045d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.667117] env[69328]: DEBUG oslo_concurrency.lockutils [None req-019042f2-f809-46a7-bb40-396a5cc0cfe7 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "de8e6616-0460-4a6e-918c-a27818da96e2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1161.667353] env[69328]: DEBUG oslo_concurrency.lockutils [None req-019042f2-f809-46a7-bb40-396a5cc0cfe7 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquired lock "de8e6616-0460-4a6e-918c-a27818da96e2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1161.668908] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14539e2d-0021-4649-980f-872379650d9e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.672913] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-670828b7-67a2-441d-8336-730d569ae3a3 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1161.673172] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-670828b7-67a2-441d-8336-730d569ae3a3 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1161.673338] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-670828b7-67a2-441d-8336-730d569ae3a3 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Deleting the datastore file [datastore1] 52c87371-4142-40d6-ac68-804aabd9f823 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1161.674092] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9d3d2768-63aa-4520-86bb-8d4cee7db116 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1161.682721] env[69328]: DEBUG oslo_vmware.api [None req-670828b7-67a2-441d-8336-730d569ae3a3 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 1161.682721] env[69328]: value = "task-3274198" [ 1161.682721] env[69328]: _type = "Task" [ 1161.682721] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.696517] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0baef903-48ec-4b17-bb95-9e4f98e5a4d9 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.348s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1161.703259] env[69328]: DEBUG oslo_concurrency.lockutils [None req-876c3230-90ec-4b39-a383-5719ae934358 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.510s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1161.703590] env[69328]: DEBUG nova.objects.instance [None req-876c3230-90ec-4b39-a383-5719ae934358 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Lazy-loading 'resources' on Instance uuid 03f0adc8-d640-4248-be9d-ab4ba0cbe760 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1161.705311] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3877532-c68a-4559-aab5-6a854d710947 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.720481] env[69328]: INFO nova.compute.manager [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Rebuilding instance [ 1161.723589] env[69328]: DEBUG oslo_vmware.api [None req-670828b7-67a2-441d-8336-730d569ae3a3 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274198, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.754042] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-019042f2-f809-46a7-bb40-396a5cc0cfe7 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Reconfiguring VM to detach interface {{(pid=69328) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1161.758142] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3cccafa1-47a5-4809-b2bc-851975be4466 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.775991] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 5a45bd6a-b063-4104-a85a-d78a4bb9452e] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1161.784677] env[69328]: DEBUG oslo_vmware.api [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274195, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.790396] env[69328]: DEBUG oslo_vmware.api [None req-019042f2-f809-46a7-bb40-396a5cc0cfe7 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 1161.790396] env[69328]: value = "task-3274199" [ 1161.790396] env[69328]: _type = "Task" [ 1161.790396] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.801875] env[69328]: DEBUG oslo_vmware.api [None req-019042f2-f809-46a7-bb40-396a5cc0cfe7 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274199, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.810516] env[69328]: DEBUG nova.compute.manager [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1161.811465] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4712e700-f448-44ae-8936-04d2f409d21c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.956112] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274196, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.079531] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0c5ae91-4678-4fb3-8a4c-2acc28bbcf1c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.101806] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-141b2389-c98c-4d15-98c0-28854046dff2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.120652] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Updating instance 'f1be93b2-08db-41fe-87c4-f4e5f964cfa4' progress to 67 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1162.178364] env[69328]: INFO nova.compute.manager [None req-f456ecf7-d747-413b-bc17-45b7e9760fc6 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Get console output [ 1162.178704] env[69328]: WARNING nova.virt.vmwareapi.driver [None req-f456ecf7-d747-413b-bc17-45b7e9760fc6 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] The console log is missing. Check your VSPC configuration [ 1162.204164] env[69328]: DEBUG oslo_vmware.api [None req-670828b7-67a2-441d-8336-730d569ae3a3 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274198, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.264409} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.204408] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-670828b7-67a2-441d-8336-730d569ae3a3 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1162.204613] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-670828b7-67a2-441d-8336-730d569ae3a3 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1162.204797] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-670828b7-67a2-441d-8336-730d569ae3a3 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1162.204968] env[69328]: INFO nova.compute.manager [None req-670828b7-67a2-441d-8336-730d569ae3a3 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Took 1.18 seconds to destroy the instance on the hypervisor. 
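
The records above trace the vCenter task lifecycle that recurs throughout this log: the driver invokes an asynchronous operation (ReconfigVM_Task, PowerOffVM_Task, DeleteDatastoreFile_Task, CopyVirtualDisk_Task), gets back a task reference, and the API layer polls it, logging "progress is N%" until the task reports "completed successfully" or fails. A minimal sketch of that poll-until-done pattern, assuming a hypothetical get_task_info() helper that stands in for the vSphere TaskInfo lookup (an illustration only, not oslo.vmware's actual implementation):

import time

# Hypothetical stand-in for reading the vSphere TaskInfo of a task reference;
# assumed here only so the sketch stays self-contained.
def get_task_info(task_ref):
    raise NotImplementedError("replace with a real vSphere TaskInfo lookup")

def wait_for_task(task_ref, poll_interval=0.5):
    """Poll a vCenter task until it finishes, mirroring the
    'progress is N%' / 'completed successfully' records above."""
    while True:
        info = get_task_info(task_ref)
        if info.state == "success":
            print(f"Task {task_ref} completed successfully")
            return info.result
        if info.state == "error":
            # Surface the fault to the caller, as the driver does on failure.
            raise RuntimeError(f"Task {task_ref} failed: {info.error}")
        print(f"Task {task_ref} progress is {info.progress or 0}%")
        time.sleep(poll_interval)

In the log itself the polling is driven by oslo.service looping calls (the loopingcall.py entries above) rather than a bare sleep loop; the sketch only illustrates the observable progress/completion behaviour.
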
[ 1162.205217] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-670828b7-67a2-441d-8336-730d569ae3a3 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1162.205403] env[69328]: DEBUG nova.compute.manager [-] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1162.205511] env[69328]: DEBUG nova.network.neutron [-] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1162.257293] env[69328]: DEBUG oslo_vmware.api [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274195, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.58236} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.257555] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 0c83f194-9346-4e24-a0ea-815d0b454ded/0c83f194-9346-4e24-a0ea-815d0b454ded.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1162.257775] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 0c83f194-9346-4e24-a0ea-815d0b454ded] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1162.258039] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b088e3fe-a359-48f9-82b7-41dd62ad1f8a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.264126] env[69328]: INFO nova.scheduler.client.report [None req-0baef903-48ec-4b17-bb95-9e4f98e5a4d9 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Deleted allocation for migration c9d84624-27a9-4e50-abe2-112cbefbaf04 [ 1162.268988] env[69328]: DEBUG oslo_vmware.api [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Waiting for the task: (returnval){ [ 1162.268988] env[69328]: value = "task-3274200" [ 1162.268988] env[69328]: _type = "Task" [ 1162.268988] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.278742] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 7232ad5c-9f4e-425e-824a-4c3750f665eb] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1162.287948] env[69328]: DEBUG oslo_vmware.api [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274200, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.304057] env[69328]: DEBUG oslo_vmware.api [None req-019042f2-f809-46a7-bb40-396a5cc0cfe7 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274199, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.458733] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274196, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.484255] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-893d7fe8-a0a7-479d-8510-b8a438acbf92 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.492230] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80f553f6-015f-4584-8f9a-3fb34de80504 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.208226] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0baef903-48ec-4b17-bb95-9e4f98e5a4d9 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "ee3609ea-0855-47c2-874c-349c80419781" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 12.233s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1163.209252] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 55d9ba65-e5c8-446a-a209-a840f30ff02c] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1163.211284] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1163.213829] env[69328]: DEBUG nova.compute.manager [req-fc98361f-cae3-40e4-9442-efcbe70a5b31 req-f4b54e10-c3ef-4da8-b4f7-94a817d59e63 service nova] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Received event network-vif-deleted-7da3de27-ee87-400f-ae26-a3a6995a8363 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1163.214099] env[69328]: INFO nova.compute.manager [req-fc98361f-cae3-40e4-9442-efcbe70a5b31 req-f4b54e10-c3ef-4da8-b4f7-94a817d59e63 service nova] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] 
Neutron deleted interface 7da3de27-ee87-400f-ae26-a3a6995a8363; detaching it from the instance and deleting it from the info cache [ 1163.214302] env[69328]: DEBUG nova.network.neutron [req-fc98361f-cae3-40e4-9442-efcbe70a5b31 req-f4b54e10-c3ef-4da8-b4f7-94a817d59e63 service nova] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1163.221733] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6bc2120c-08a0-475d-bddf-a59b0f46e81b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.226183] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-210c1c8a-a0a2-4790-bfa5-ab2fcc9edc1e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.242298] env[69328]: DEBUG oslo_vmware.api [None req-019042f2-f809-46a7-bb40-396a5cc0cfe7 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274199, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.249549] env[69328]: DEBUG oslo_vmware.api [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274200, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.107657} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.249858] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274196, 'name': CreateVM_Task, 'duration_secs': 1.344161} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.250106] env[69328]: DEBUG oslo_vmware.api [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Waiting for the task: (returnval){ [ 1163.250106] env[69328]: value = "task-3274201" [ 1163.250106] env[69328]: _type = "Task" [ 1163.250106] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.252999] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 0c83f194-9346-4e24-a0ea-815d0b454ded] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1163.253130] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1163.253941] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-410cc8ff-cf5a-463a-8681-efbeef718991 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.257536] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1163.257772] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1163.259795] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1163.262264] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90fe3ff5-ec3c-4d2a-ae99-0f7767d0af67 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.266370] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03bad639-0fe5-4fca-b1b7-b8cce22ca796 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.270259] env[69328]: DEBUG nova.network.neutron [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Port 1018560a-13d7-4d01-8fc4-03d0b9beab90 binding to destination host cpu-1 is already ACTIVE {{(pid=69328) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1163.301546] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 0c83f194-9346-4e24-a0ea-815d0b454ded] Reconfiguring VM instance instance-00000075 to attach disk [datastore1] 0c83f194-9346-4e24-a0ea-815d0b454ded/0c83f194-9346-4e24-a0ea-815d0b454ded.vmdk or device None with type sparse 
{{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1163.302316] env[69328]: DEBUG oslo_vmware.api [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Task: {'id': task-3274201, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.302416] env[69328]: DEBUG nova.compute.provider_tree [None req-876c3230-90ec-4b39-a383-5719ae934358 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1163.309145] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bca31b8a-3010-4afb-81d2-9a5e0f41ace4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.323967] env[69328]: DEBUG oslo_vmware.api [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Waiting for the task: (returnval){ [ 1163.323967] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52354f54-a492-31d8-a30a-eb44ed032f5b" [ 1163.323967] env[69328]: _type = "Task" [ 1163.323967] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.333074] env[69328]: DEBUG oslo_vmware.api [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Waiting for the task: (returnval){ [ 1163.333074] env[69328]: value = "task-3274202" [ 1163.333074] env[69328]: _type = "Task" [ 1163.333074] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.343402] env[69328]: DEBUG oslo_vmware.api [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52354f54-a492-31d8-a30a-eb44ed032f5b, 'name': SearchDatastore_Task, 'duration_secs': 0.01046} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.344168] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1163.344450] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1163.344732] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1163.344948] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1163.345241] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1163.345544] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-98816e92-b4b8-4a51-88e3-d81ec8d40116 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.351240] env[69328]: DEBUG oslo_vmware.api [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274202, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.362976] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1163.363326] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1163.364439] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52c61bdd-bd28-46a8-89ce-dd496e28a87f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.371889] env[69328]: DEBUG oslo_vmware.api [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Waiting for the task: (returnval){ [ 1163.371889] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52aea6bb-4df9-c11a-4ebf-efb599622c72" [ 1163.371889] env[69328]: _type = "Task" [ 1163.371889] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.385172] env[69328]: DEBUG oslo_vmware.api [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52aea6bb-4df9-c11a-4ebf-efb599622c72, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.722921] env[69328]: DEBUG oslo_vmware.api [None req-019042f2-f809-46a7-bb40-396a5cc0cfe7 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274199, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.730647] env[69328]: DEBUG nova.network.neutron [-] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1163.732718] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: fd72bae3-cb72-48d0-a0df-9ea3a770a86c] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1163.734372] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c3292ede-333a-45e1-b265-f03e0cfa3848 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.747489] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4006af54-081a-4a38-afa6-ea32b79562e6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.770689] env[69328]: DEBUG oslo_vmware.api [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Task: {'id': task-3274201, 'name': PowerOffVM_Task, 'duration_secs': 0.177001} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.771043] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1163.771753] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1163.772014] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-72a71541-fc17-49fd-96a2-092dd7274191 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.787884] env[69328]: DEBUG nova.compute.manager [req-fc98361f-cae3-40e4-9442-efcbe70a5b31 req-f4b54e10-c3ef-4da8-b4f7-94a817d59e63 service nova] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Detach interface failed, port_id=7da3de27-ee87-400f-ae26-a3a6995a8363, reason: Instance 52c87371-4142-40d6-ac68-804aabd9f823 could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1163.789512] env[69328]: DEBUG oslo_vmware.api [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Waiting for the task: (returnval){ [ 1163.789512] env[69328]: value = "task-3274203" [ 1163.789512] env[69328]: _type = "Task" [ 1163.789512] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.798161] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] VM already powered off {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1163.798390] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Volume detach. 
Driver type: vmdk {{(pid=69328) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1163.798584] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653934', 'volume_id': '10778dba-1e87-4365-bb28-98360345c613', 'name': 'volume-10778dba-1e87-4365-bb28-98360345c613', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732', 'attached_at': '', 'detached_at': '', 'volume_id': '10778dba-1e87-4365-bb28-98360345c613', 'serial': '10778dba-1e87-4365-bb28-98360345c613'} {{(pid=69328) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1163.799400] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b56523d-440c-4503-9003-3fcc06c0164a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.821188] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11b11fd9-dbd1-4f53-b0bd-019ea5bd00d8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.833725] env[69328]: DEBUG nova.scheduler.client.report [None req-876c3230-90ec-4b39-a383-5719ae934358 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1163.844791] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "f1be93b2-08db-41fe-87c4-f4e5f964cfa4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1163.845046] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "f1be93b2-08db-41fe-87c4-f4e5f964cfa4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1163.845224] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "f1be93b2-08db-41fe-87c4-f4e5f964cfa4-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1163.849845] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-355d2535-12ea-4649-b538-7cf9efd9ad3a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.860848] env[69328]: DEBUG oslo_vmware.api [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274202, 'name': ReconfigVM_Task, 'duration_secs': 0.515818} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.874289] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 0c83f194-9346-4e24-a0ea-815d0b454ded] Reconfigured VM instance instance-00000075 to attach disk [datastore1] 0c83f194-9346-4e24-a0ea-815d0b454ded/0c83f194-9346-4e24-a0ea-815d0b454ded.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1163.875623] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5c08ecaa-ea2b-4515-a00b-3fc847d8804a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.880345] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41b338b7-b5ea-4fdf-8a0a-9f37b213ce50 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.889762] env[69328]: DEBUG oslo_vmware.api [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52aea6bb-4df9-c11a-4ebf-efb599622c72, 'name': SearchDatastore_Task, 'duration_secs': 0.012949} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.901492] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] The volume has not been displaced from its original location: [datastore2] volume-10778dba-1e87-4365-bb28-98360345c613/volume-10778dba-1e87-4365-bb28-98360345c613.vmdk. No consolidation needed. 
{{(pid=69328) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1163.906794] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Reconfiguring VM instance instance-0000006d to detach disk 2000 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1163.907170] env[69328]: DEBUG oslo_vmware.api [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Waiting for the task: (returnval){ [ 1163.907170] env[69328]: value = "task-3274204" [ 1163.907170] env[69328]: _type = "Task" [ 1163.907170] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.907373] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8857e746-23a8-4341-91b2-013e17cf84c3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.909700] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a562f216-7727-4b73-970c-29ef82a8a274 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.929233] env[69328]: DEBUG oslo_vmware.api [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Waiting for the task: (returnval){ [ 1163.929233] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]525c96b4-435d-6bc1-12ed-1e61131eb44a" [ 1163.929233] env[69328]: _type = "Task" [ 1163.929233] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.932546] env[69328]: DEBUG oslo_vmware.api [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274204, 'name': Rename_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.936912] env[69328]: DEBUG oslo_vmware.api [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Waiting for the task: (returnval){ [ 1163.936912] env[69328]: value = "task-3274205" [ 1163.936912] env[69328]: _type = "Task" [ 1163.936912] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.943282] env[69328]: DEBUG oslo_vmware.api [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]525c96b4-435d-6bc1-12ed-1e61131eb44a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.949074] env[69328]: DEBUG oslo_vmware.api [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Task: {'id': task-3274205, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.222041] env[69328]: DEBUG oslo_vmware.api [None req-019042f2-f809-46a7-bb40-396a5cc0cfe7 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274199, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.232972] env[69328]: INFO nova.compute.manager [-] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Took 2.03 seconds to deallocate network for instance. [ 1164.238043] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: e5a2de79-cfbc-4d9c-8b58-5aa819657978] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1164.347905] env[69328]: DEBUG oslo_concurrency.lockutils [None req-876c3230-90ec-4b39-a383-5719ae934358 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.645s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1164.350274] env[69328]: DEBUG oslo_concurrency.lockutils [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.740s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1164.351803] env[69328]: INFO nova.compute.claims [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1164.373957] env[69328]: INFO nova.scheduler.client.report [None req-876c3230-90ec-4b39-a383-5719ae934358 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Deleted allocations for instance 03f0adc8-d640-4248-be9d-ab4ba0cbe760 [ 1164.433289] env[69328]: DEBUG oslo_vmware.api [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274204, 'name': Rename_Task, 'duration_secs': 0.136232} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.436261] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 0c83f194-9346-4e24-a0ea-815d0b454ded] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1164.436513] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2f01387a-c131-4b94-8850-5816b2e0cfe9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.444856] env[69328]: DEBUG oslo_vmware.api [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]525c96b4-435d-6bc1-12ed-1e61131eb44a, 'name': SearchDatastore_Task, 'duration_secs': 0.012753} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.446272] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1164.446533] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 53eb70f0-1734-4386-b747-014561ba577b/53eb70f0-1734-4386-b747-014561ba577b.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1164.446827] env[69328]: DEBUG oslo_vmware.api [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Waiting for the task: (returnval){ [ 1164.446827] env[69328]: value = "task-3274206" [ 1164.446827] env[69328]: _type = "Task" [ 1164.446827] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.447013] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-41cf177e-b2b7-4323-86ae-b56bf8ca2a4c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.453910] env[69328]: DEBUG oslo_vmware.api [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Task: {'id': task-3274205, 'name': ReconfigVM_Task, 'duration_secs': 0.163345} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.454460] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Reconfigured VM instance instance-0000006d to detach disk 2000 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1164.460195] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-444de6a3-219d-4e85-bf2e-bf0cc183e262 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.470271] env[69328]: DEBUG oslo_vmware.api [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Waiting for the task: (returnval){ [ 1164.470271] env[69328]: value = "task-3274207" [ 1164.470271] env[69328]: _type = "Task" [ 1164.470271] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.473766] env[69328]: DEBUG oslo_vmware.api [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274206, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.478750] env[69328]: DEBUG oslo_vmware.api [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Waiting for the task: (returnval){ [ 1164.478750] env[69328]: value = "task-3274208" [ 1164.478750] env[69328]: _type = "Task" [ 1164.478750] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.485993] env[69328]: DEBUG oslo_vmware.api [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274207, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.491098] env[69328]: DEBUG oslo_vmware.api [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Task: {'id': task-3274208, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.724900] env[69328]: DEBUG oslo_vmware.api [None req-019042f2-f809-46a7-bb40-396a5cc0cfe7 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274199, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.740319] env[69328]: DEBUG oslo_concurrency.lockutils [None req-670828b7-67a2-441d-8336-730d569ae3a3 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1164.740645] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 36f6aab5-2774-402b-9db6-9912f2d5d473] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1164.882242] env[69328]: DEBUG oslo_concurrency.lockutils [None req-876c3230-90ec-4b39-a383-5719ae934358 tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Lock "03f0adc8-d640-4248-be9d-ab4ba0cbe760" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.380s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1164.909473] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "refresh_cache-f1be93b2-08db-41fe-87c4-f4e5f964cfa4" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1164.909732] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquired lock "refresh_cache-f1be93b2-08db-41fe-87c4-f4e5f964cfa4" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1164.909931] env[69328]: DEBUG nova.network.neutron [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1164.962576] env[69328]: DEBUG oslo_vmware.api [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274206, 'name': PowerOnVM_Task, 'duration_secs': 0.455894} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.962962] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 0c83f194-9346-4e24-a0ea-815d0b454ded] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1164.963263] env[69328]: INFO nova.compute.manager [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 0c83f194-9346-4e24-a0ea-815d0b454ded] Took 5.97 seconds to spawn the instance on the hypervisor. 
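The recurring "Task: {'id': task-32742xx, ...} progress is N%" entries followed by "completed successfully" come from oslo.vmware's task-polling helpers (wait_for_task/_poll_task in oslo_vmware/api.py, per the paths quoted above). A minimal sketch of how a caller drives that loop for the PowerOnVM_Task seen here is below; the helper names, host and credentials are placeholders, and the VMwareAPISession keyword arguments may differ between oslo.vmware releases.

    # Sketch only: the invoke_api()/wait_for_task() pattern reflected in this log.
    from oslo_vmware import api as vmware_api


    def build_session():
        # Placeholder endpoint and credentials; constructing the session logs in
        # to vCenter immediately. Keyword names may vary by oslo.vmware release.
        return vmware_api.VMwareAPISession(
            host='vc.example.test',
            server_username='stack',
            server_password='secret',
            api_retry_count=10,
            task_poll_interval=0.5)


    def power_on_and_wait(session, vm_ref):
        # Kick off the asynchronous vCenter task, then let oslo.vmware poll it
        # to completion; this is the loop that produces the "progress is N%" and
        # "completed successfully" entries above. wait_for_task() raises if the
        # task finishes in an error state, otherwise it returns the task info.
        task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        return session.wait_for_task(task_ref)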
[ 1164.963517] env[69328]: DEBUG nova.compute.manager [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 0c83f194-9346-4e24-a0ea-815d0b454ded] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1164.964486] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f085857b-adea-4dc4-8f9c-59913e13c9c1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.992960] env[69328]: DEBUG oslo_vmware.api [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274207, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.996687] env[69328]: DEBUG oslo_vmware.api [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Task: {'id': task-3274208, 'name': ReconfigVM_Task, 'duration_secs': 0.151217} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.997046] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653934', 'volume_id': '10778dba-1e87-4365-bb28-98360345c613', 'name': 'volume-10778dba-1e87-4365-bb28-98360345c613', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732', 'attached_at': '', 'detached_at': '', 'volume_id': '10778dba-1e87-4365-bb28-98360345c613', 'serial': '10778dba-1e87-4365-bb28-98360345c613'} {{(pid=69328) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1164.997382] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1164.998150] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41e76307-0909-48b4-91e6-d3f9f234dc29 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.005998] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1165.006170] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e62fadf6-0ef3-4d25-b705-3346939f39f0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.082160] env[69328]: DEBUG 
nova.virt.vmwareapi.vmops [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1165.082382] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1165.082566] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Deleting the datastore file [datastore2] 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1165.082833] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8412bcf9-3a75-47fe-acf6-d86021966583 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.089876] env[69328]: DEBUG oslo_vmware.api [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Waiting for the task: (returnval){ [ 1165.089876] env[69328]: value = "task-3274211" [ 1165.089876] env[69328]: _type = "Task" [ 1165.089876] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.100259] env[69328]: DEBUG oslo_vmware.api [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Task: {'id': task-3274211, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.223638] env[69328]: DEBUG oslo_vmware.api [None req-019042f2-f809-46a7-bb40-396a5cc0cfe7 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274199, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.244314] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 4a990411-16cd-4e53-9068-29654b69abe6] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1165.496474] env[69328]: DEBUG oslo_vmware.api [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274207, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.556117} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.497077] env[69328]: INFO nova.compute.manager [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 0c83f194-9346-4e24-a0ea-815d0b454ded] Took 13.76 seconds to build instance. 
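The "Acquiring lock" / "Acquired lock" / "Releasing lock" entries around "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" above come from oslo.concurrency's lockutils (lockutils.py:313/316/334 in the quoted paths): the driver serializes work on each cached image while deciding whether the vmdk must be copied into the instance folder. A minimal sketch of that locking pattern follows; populate_image_cache() is a hypothetical stand-in for the driver's _fetch_image_if_missing() step.

    # Sketch only: the per-image cache lock pattern visible in the lines above.
    from oslo_concurrency import lockutils


    def populate_image_cache(datastore, image_id):
        # The lock name mirrors the "[datastore2] devstack-image-cache_base/<id>"
        # names in the log; lockutils.lock() emits the Acquiring/Acquired/Releasing
        # debug lines. The default lock is in-process; external=True would add a
        # file-based lock as well.
        lock_name = '[%s] devstack-image-cache_base/%s' % (datastore, image_id)
        with lockutils.lock(lock_name):
            # Check the datastore for the cached vmdk and copy/extend it if it
            # is missing (elided here).
            pass


    # populate_image_cache('datastore2', 'a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318')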
[ 1165.497915] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 53eb70f0-1734-4386-b747-014561ba577b/53eb70f0-1734-4386-b747-014561ba577b.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1165.498145] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1165.498389] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1ae8d760-57ba-4ee7-86ff-a427b8c3a1d2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.506935] env[69328]: DEBUG oslo_vmware.api [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Waiting for the task: (returnval){ [ 1165.506935] env[69328]: value = "task-3274212" [ 1165.506935] env[69328]: _type = "Task" [ 1165.506935] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.519227] env[69328]: DEBUG oslo_vmware.api [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274212, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.582495] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25004d9e-94b9-454c-8506-38a5c3764ad6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.597521] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8259eab-13ca-49dd-9b95-4941ac58a529 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.612388] env[69328]: DEBUG oslo_vmware.api [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Task: {'id': task-3274211, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.083894} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.637870] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1165.638218] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1165.638437] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1165.643458] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b52f0ba7-cfd1-46fe-b6ee-79708885611d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.661385] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01350389-6831-448a-8751-ba5635aa22d4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.677259] env[69328]: DEBUG nova.compute.provider_tree [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1165.679304] env[69328]: DEBUG nova.network.neutron [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Updating instance_info_cache with network_info: [{"id": "1018560a-13d7-4d01-8fc4-03d0b9beab90", "address": "fa:16:3e:33:ba:27", "network": {"id": "4031def8-0553-461f-9528-aa338b9d7b2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1834835647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f357b5a9494b4849a83aa934c5d4e26b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "357d2811-e990-4985-9f9e-b158d10d3699", "external-id": "nsx-vlan-transportzone-641", "segmentation_id": 641, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1018560a-13", "ovs_interfaceid": "1018560a-13d7-4d01-8fc4-03d0b9beab90", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1165.692989] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Volume detach. Driver type: vmdk {{(pid=69328) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1165.693302] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dcfd5f85-3953-43c2-a17e-797a50d48bdd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.702620] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06b130d4-82ac-46bf-87c3-3745700df04d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.722427] env[69328]: DEBUG oslo_vmware.api [None req-019042f2-f809-46a7-bb40-396a5cc0cfe7 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274199, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.742824] env[69328]: ERROR nova.compute.manager [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Failed to detach volume 10778dba-1e87-4365-bb28-98360345c613 from /dev/sda: nova.exception.InstanceNotFound: Instance 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732 could not be found. 
[ 1165.742824] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Traceback (most recent call last): [ 1165.742824] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 1165.742824] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] self.driver.rebuild(**kwargs) [ 1165.742824] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 1165.742824] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] raise NotImplementedError() [ 1165.742824] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] NotImplementedError [ 1165.742824] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] [ 1165.742824] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] During handling of the above exception, another exception occurred: [ 1165.742824] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] [ 1165.742824] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Traceback (most recent call last): [ 1165.742824] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 1165.742824] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] self.driver.detach_volume(context, old_connection_info, [ 1165.742824] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 1165.742824] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] return self._volumeops.detach_volume(connection_info, instance) [ 1165.742824] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1165.742824] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] self._detach_volume_vmdk(connection_info, instance) [ 1165.742824] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1165.742824] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1165.742824] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1165.742824] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] stable_ref.fetch_moref(session) [ 1165.742824] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1165.742824] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1165.742824] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] 
nova.exception.InstanceNotFound: Instance 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732 could not be found. [ 1165.742824] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] [ 1165.747632] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 4d320c76-45bb-451c-8fbb-3dd2d64f56d5] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1165.876530] env[69328]: DEBUG nova.compute.utils [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Build of instance 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732 aborted: Failed to rebuild volume backed instance. {{(pid=69328) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 1165.878800] env[69328]: ERROR nova.compute.manager [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732 aborted: Failed to rebuild volume backed instance. [ 1165.878800] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Traceback (most recent call last): [ 1165.878800] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 1165.878800] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] self.driver.rebuild(**kwargs) [ 1165.878800] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 1165.878800] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] raise NotImplementedError() [ 1165.878800] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] NotImplementedError [ 1165.878800] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] [ 1165.878800] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] During handling of the above exception, another exception occurred: [ 1165.878800] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] [ 1165.878800] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Traceback (most recent call last): [ 1165.878800] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] File "/opt/stack/nova/nova/compute/manager.py", line 3643, in _rebuild_volume_backed_instance [ 1165.878800] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] self._detach_root_volume(context, instance, root_bdm) [ 1165.878800] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] File "/opt/stack/nova/nova/compute/manager.py", line 3622, in _detach_root_volume [ 1165.878800] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] with excutils.save_and_reraise_exception(): [ 1165.878800] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1165.878800] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] self.force_reraise() [ 1165.878800] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1165.878800] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] raise self.value [ 1165.878800] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 1165.878800] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] self.driver.detach_volume(context, old_connection_info, [ 1165.878800] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 1165.878800] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] return self._volumeops.detach_volume(connection_info, instance) [ 1165.878800] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1165.878800] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] self._detach_volume_vmdk(connection_info, instance) [ 1165.878800] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1165.878800] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1165.878800] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1165.878800] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] stable_ref.fetch_moref(session) [ 1165.878800] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1165.878800] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1165.878800] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] nova.exception.InstanceNotFound: Instance 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732 could not be found. 
[ 1165.878800] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] [ 1165.878800] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] During handling of the above exception, another exception occurred: [ 1165.878800] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] [ 1165.878800] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Traceback (most recent call last): [ 1165.878800] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] File "/opt/stack/nova/nova/compute/manager.py", line 11471, in _error_out_instance_on_exception [ 1165.878800] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] yield [ 1165.878800] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] File "/opt/stack/nova/nova/compute/manager.py", line 3911, in rebuild_instance [ 1165.878800] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] self._do_rebuild_instance_with_claim( [ 1165.880045] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] File "/opt/stack/nova/nova/compute/manager.py", line 3997, in _do_rebuild_instance_with_claim [ 1165.880045] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] self._do_rebuild_instance( [ 1165.880045] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] File "/opt/stack/nova/nova/compute/manager.py", line 4189, in _do_rebuild_instance [ 1165.880045] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] self._rebuild_default_impl(**kwargs) [ 1165.880045] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] File "/opt/stack/nova/nova/compute/manager.py", line 3766, in _rebuild_default_impl [ 1165.880045] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] self._rebuild_volume_backed_instance( [ 1165.880045] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] File "/opt/stack/nova/nova/compute/manager.py", line 3658, in _rebuild_volume_backed_instance [ 1165.880045] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] raise exception.BuildAbortException( [ 1165.880045] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] nova.exception.BuildAbortException: Build of instance 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732 aborted: Failed to rebuild volume backed instance. 
[ 1165.880045] env[69328]: ERROR nova.compute.manager [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] [ 1166.001734] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e587905f-099c-4683-9de7-cf69e00f0eb4 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Lock "0c83f194-9346-4e24-a0ea-815d0b454ded" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.272s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1166.019292] env[69328]: DEBUG oslo_vmware.api [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274212, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.239532} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.020067] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1166.020324] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c98b48be-2fcd-4e5d-b69d-01ee2ee918be {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.039578] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Reconfiguring VM instance instance-00000076 to attach disk [datastore2] 53eb70f0-1734-4386-b747-014561ba577b/53eb70f0-1734-4386-b747-014561ba577b.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1166.039837] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e06f1af0-0d8b-44dd-b7a8-6f6013735222 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.059490] env[69328]: DEBUG oslo_vmware.api [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Waiting for the task: (returnval){ [ 1166.059490] env[69328]: value = "task-3274213" [ 1166.059490] env[69328]: _type = "Task" [ 1166.059490] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.067177] env[69328]: DEBUG oslo_vmware.api [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274213, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.182638] env[69328]: DEBUG nova.scheduler.client.report [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1166.186190] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Releasing lock "refresh_cache-f1be93b2-08db-41fe-87c4-f4e5f964cfa4" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1166.223841] env[69328]: DEBUG oslo_vmware.api [None req-019042f2-f809-46a7-bb40-396a5cc0cfe7 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274199, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.251647] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 3b4b6687-fb6d-4bb7-8604-20a3ba706ff3] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1166.570718] env[69328]: DEBUG oslo_vmware.api [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274213, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.691215] env[69328]: DEBUG oslo_concurrency.lockutils [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.341s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1166.691781] env[69328]: DEBUG nova.compute.manager [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1166.698010] env[69328]: DEBUG oslo_concurrency.lockutils [None req-670828b7-67a2-441d-8336-730d569ae3a3 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.958s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1166.698607] env[69328]: DEBUG nova.objects.instance [None req-670828b7-67a2-441d-8336-730d569ae3a3 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lazy-loading 'resources' on Instance uuid 52c87371-4142-40d6-ac68-804aabd9f823 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1166.714138] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72c72e96-77b5-4121-81fa-24b5ef8fed37 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.728376] env[69328]: DEBUG oslo_vmware.api [None req-019042f2-f809-46a7-bb40-396a5cc0cfe7 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274199, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.743732] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25280eb3-7390-4892-983c-0dfc7de31b8e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.752305] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Updating instance 'f1be93b2-08db-41fe-87c4-f4e5f964cfa4' progress to 83 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1166.756612] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 25fb207b-9388-4198-bb48-ab7cebd43375] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1167.070653] env[69328]: DEBUG oslo_vmware.api [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274213, 'name': ReconfigVM_Task, 'duration_secs': 0.623434} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.071009] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Reconfigured VM instance instance-00000076 to attach disk [datastore2] 53eb70f0-1734-4386-b747-014561ba577b/53eb70f0-1734-4386-b747-014561ba577b.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1167.071694] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-635eb2fc-cc43-4d41-9918-2aeeffb31b9a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.080414] env[69328]: DEBUG oslo_vmware.api [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Waiting for the task: (returnval){ [ 1167.080414] env[69328]: value = "task-3274214" [ 1167.080414] env[69328]: _type = "Task" [ 1167.080414] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.090017] env[69328]: DEBUG oslo_vmware.api [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274214, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.198148] env[69328]: DEBUG nova.compute.utils [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1167.200714] env[69328]: DEBUG nova.compute.manager [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1167.200714] env[69328]: DEBUG nova.network.neutron [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1167.230264] env[69328]: DEBUG oslo_vmware.api [None req-019042f2-f809-46a7-bb40-396a5cc0cfe7 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274199, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.250758] env[69328]: DEBUG nova.policy [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a07713f537e84711bc559a085d1e05f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8bbb75992830459c85c818e850261c61', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 1167.258731] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1167.261473] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 8c648ccb-ed14-4f5d-92aa-5a65d6efe1b4] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1167.263269] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-254b053e-8582-468d-b9d5-3eb5454a2db0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.272840] env[69328]: DEBUG oslo_vmware.api [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1167.272840] env[69328]: value = "task-3274215" [ 1167.272840] env[69328]: _type = "Task" [ 1167.272840] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.285032] env[69328]: DEBUG oslo_vmware.api [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274215, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.464811] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0c35f55-5f3d-4e74-acfa-46abc649f9d6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.473548] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b7878da-5221-4e5e-a30f-3315549dad13 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.524478] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef9ee929-e08b-4a47-a590-7acd1d179474 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.535084] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44fd5b4f-ee6f-4ef7-af17-d660d72849de {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.556827] env[69328]: DEBUG nova.compute.provider_tree [None req-670828b7-67a2-441d-8336-730d569ae3a3 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1167.593657] env[69328]: DEBUG oslo_vmware.api [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274214, 'name': Rename_Task, 'duration_secs': 0.148116} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.594127] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1167.594777] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6b322ab5-b5b5-493e-8a04-08f4146ddcde {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.605513] env[69328]: DEBUG oslo_vmware.api [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Waiting for the task: (returnval){ [ 1167.605513] env[69328]: value = "task-3274216" [ 1167.605513] env[69328]: _type = "Task" [ 1167.605513] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.621560] env[69328]: DEBUG oslo_vmware.api [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274216, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.675997] env[69328]: DEBUG nova.network.neutron [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Successfully created port: d76b0cd3-fa46-430c-b29d-7439c7857ba3 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1167.709983] env[69328]: DEBUG nova.compute.manager [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1167.729888] env[69328]: DEBUG oslo_vmware.api [None req-019042f2-f809-46a7-bb40-396a5cc0cfe7 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274199, 'name': ReconfigVM_Task, 'duration_secs': 5.790103} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.730257] env[69328]: DEBUG oslo_concurrency.lockutils [None req-019042f2-f809-46a7-bb40-396a5cc0cfe7 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Releasing lock "de8e6616-0460-4a6e-918c-a27818da96e2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1167.730509] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-019042f2-f809-46a7-bb40-396a5cc0cfe7 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Reconfigured VM to detach interface {{(pid=69328) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1167.766481] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 18022645-9a2a-489e-b0b1-486165f46f14] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1167.783671] env[69328]: DEBUG oslo_vmware.api [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274215, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.897221] env[69328]: DEBUG oslo_concurrency.lockutils [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.062292] env[69328]: DEBUG nova.scheduler.client.report [None req-670828b7-67a2-441d-8336-730d569ae3a3 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1168.125635] env[69328]: DEBUG oslo_vmware.api [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274216, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.273027] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: bc9c3a41-7264-4d69-bc15-397b5fa0a8ad] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1168.290181] env[69328]: DEBUG oslo_vmware.api [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274215, 'name': PowerOnVM_Task, 'duration_secs': 0.538903} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.290625] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1168.290908] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5e699919-439f-433a-a89e-cec25e1f8cad tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Updating instance 'f1be93b2-08db-41fe-87c4-f4e5f964cfa4' progress to 100 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1168.569882] env[69328]: DEBUG oslo_concurrency.lockutils [None req-670828b7-67a2-441d-8336-730d569ae3a3 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.872s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1168.572771] env[69328]: DEBUG oslo_concurrency.lockutils [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.676s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1168.596930] env[69328]: INFO nova.scheduler.client.report [None req-670828b7-67a2-441d-8336-730d569ae3a3 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Deleted allocations for instance 52c87371-4142-40d6-ac68-804aabd9f823 [ 1168.618930] env[69328]: DEBUG oslo_vmware.api [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274216, 'name': PowerOnVM_Task, 'duration_secs': 0.753737} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.618930] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1168.619142] env[69328]: INFO nova.compute.manager [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Took 7.25 seconds to spawn the instance on the hypervisor. 
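Editor's note: the ReconfigVM_Task / Rename_Task / PowerOnVM_Task entries above come from oslo.vmware's wait_for_task, whose _poll_task helper emits the "progress is N%" lines until the task finishes and a duration_secs is reported. The following is only a minimal stand-alone sketch of that poll loop, not the oslo.vmware implementation; fetch_task_info and the shape of its return value are assumptions made for illustration.

import time

class TaskFailed(Exception):
    pass

def wait_for_task(fetch_task_info, task_id, poll_interval=0.5):
    """Poll a vCenter-style task until it finishes.

    fetch_task_info(task_id) is assumed to return a dict like
    {'state': 'running'|'success'|'error', 'progress': int, 'error': str}.
    """
    start = time.monotonic()
    while True:
        info = fetch_task_info(task_id)
        if info['state'] == 'success':
            # Mirrors the "... completed successfully" entries that carry duration_secs.
            return {'id': task_id, 'duration_secs': time.monotonic() - start}
        if info['state'] == 'error':
            raise TaskFailed(info.get('error', 'unknown error'))
        # Mirrors the repeated "progress is N%" DEBUG entries seen above.
        print(f"Task {task_id} progress is {info.get('progress', 0)}%.")
        time.sleep(poll_interval)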
[ 1168.619573] env[69328]: DEBUG nova.compute.manager [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1168.620116] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e188024-9212-4191-a3e9-fb72c40b6ebe {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.626697] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1efcf580-115c-4fbd-aaae-e7bab4ac1d0f tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Acquiring lock "275ef1ed-8e60-4151-b548-e22e5bd8efe2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.628076] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1efcf580-115c-4fbd-aaae-e7bab4ac1d0f tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Lock "275ef1ed-8e60-4151-b548-e22e5bd8efe2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1168.628076] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1efcf580-115c-4fbd-aaae-e7bab4ac1d0f tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Acquiring lock "275ef1ed-8e60-4151-b548-e22e5bd8efe2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.628076] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1efcf580-115c-4fbd-aaae-e7bab4ac1d0f tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Lock "275ef1ed-8e60-4151-b548-e22e5bd8efe2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1168.628076] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1efcf580-115c-4fbd-aaae-e7bab4ac1d0f tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Lock "275ef1ed-8e60-4151-b548-e22e5bd8efe2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1168.634443] env[69328]: INFO nova.compute.manager [None req-1efcf580-115c-4fbd-aaae-e7bab4ac1d0f tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Terminating instance [ 1168.722854] env[69328]: DEBUG nova.compute.manager [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1168.749918] env[69328]: DEBUG nova.virt.hardware [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1168.750221] env[69328]: DEBUG nova.virt.hardware [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1168.750396] env[69328]: DEBUG nova.virt.hardware [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1168.750591] env[69328]: DEBUG nova.virt.hardware [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1168.750745] env[69328]: DEBUG nova.virt.hardware [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1168.750908] env[69328]: DEBUG nova.virt.hardware [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1168.751130] env[69328]: DEBUG nova.virt.hardware [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1168.751307] env[69328]: DEBUG nova.virt.hardware [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1168.751474] env[69328]: DEBUG 
nova.virt.hardware [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1168.751647] env[69328]: DEBUG nova.virt.hardware [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1168.751822] env[69328]: DEBUG nova.virt.hardware [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1168.752743] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3a38a5a-b1b9-460e-935b-d616a2d47560 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.764024] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba7af404-b68b-40a4-ab97-4f703fe88491 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.781499] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: d37b4f32-7ae9-46f1-8f7d-ac61fb8fd5d8] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1168.813814] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0559fd7e-55b3-43a8-a292-6b1e6c58f66b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.822710] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8f18cde-65c9-481b-9edd-07abe7f0d183 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.853437] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f096a54-598c-434c-808a-75f9e7ee6be0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.861576] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50744b16-ccc5-40fc-815b-a1727187a52a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.877075] env[69328]: DEBUG nova.compute.provider_tree [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1168.908587] env[69328]: DEBUG oslo_concurrency.lockutils [None req-616dd6ec-6084-4db0-b933-142e7552947e tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Acquiring lock 
"5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.908846] env[69328]: DEBUG oslo_concurrency.lockutils [None req-616dd6ec-6084-4db0-b933-142e7552947e tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Lock "5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1168.909071] env[69328]: DEBUG oslo_concurrency.lockutils [None req-616dd6ec-6084-4db0-b933-142e7552947e tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Acquiring lock "5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.909261] env[69328]: DEBUG oslo_concurrency.lockutils [None req-616dd6ec-6084-4db0-b933-142e7552947e tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Lock "5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1168.909424] env[69328]: DEBUG oslo_concurrency.lockutils [None req-616dd6ec-6084-4db0-b933-142e7552947e tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Lock "5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1168.911539] env[69328]: INFO nova.compute.manager [None req-616dd6ec-6084-4db0-b933-142e7552947e tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Terminating instance [ 1169.106345] env[69328]: DEBUG oslo_concurrency.lockutils [None req-670828b7-67a2-441d-8336-730d569ae3a3 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lock "52c87371-4142-40d6-ac68-804aabd9f823" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.598s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1169.134818] env[69328]: DEBUG oslo_concurrency.lockutils [None req-019042f2-f809-46a7-bb40-396a5cc0cfe7 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "refresh_cache-de8e6616-0460-4a6e-918c-a27818da96e2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1169.135023] env[69328]: DEBUG oslo_concurrency.lockutils [None req-019042f2-f809-46a7-bb40-396a5cc0cfe7 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquired lock "refresh_cache-de8e6616-0460-4a6e-918c-a27818da96e2" {{(pid=69328) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1169.135210] env[69328]: DEBUG nova.network.neutron [None req-019042f2-f809-46a7-bb40-396a5cc0cfe7 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1169.140436] env[69328]: DEBUG nova.compute.manager [None req-1efcf580-115c-4fbd-aaae-e7bab4ac1d0f tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1169.140637] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-1efcf580-115c-4fbd-aaae-e7bab4ac1d0f tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1169.143034] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b512c31a-8525-4a3d-b2ec-0315975ea978 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.145880] env[69328]: INFO nova.compute.manager [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Took 17.17 seconds to build instance. [ 1169.152652] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-1efcf580-115c-4fbd-aaae-e7bab4ac1d0f tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1169.152923] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3efd3398-4d5e-4914-9c63-461f20881d8b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.162297] env[69328]: DEBUG oslo_vmware.api [None req-1efcf580-115c-4fbd-aaae-e7bab4ac1d0f tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Waiting for the task: (returnval){ [ 1169.162297] env[69328]: value = "task-3274217" [ 1169.162297] env[69328]: _type = "Task" [ 1169.162297] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.172407] env[69328]: DEBUG oslo_vmware.api [None req-1efcf580-115c-4fbd-aaae-e7bab4ac1d0f tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3274217, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.283315] env[69328]: DEBUG nova.compute.manager [req-e25f026f-ac54-4f9e-8cbe-f3865b668012 req-569d64de-a90b-4eef-88f9-3fb831024f9c service nova] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Received event network-vif-plugged-d76b0cd3-fa46-430c-b29d-7439c7857ba3 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1169.283700] env[69328]: DEBUG oslo_concurrency.lockutils [req-e25f026f-ac54-4f9e-8cbe-f3865b668012 req-569d64de-a90b-4eef-88f9-3fb831024f9c service nova] Acquiring lock "0cf68559-5f07-4006-9f7f-59027e31635d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1169.283940] env[69328]: DEBUG oslo_concurrency.lockutils [req-e25f026f-ac54-4f9e-8cbe-f3865b668012 req-569d64de-a90b-4eef-88f9-3fb831024f9c service nova] Lock "0cf68559-5f07-4006-9f7f-59027e31635d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1169.284008] env[69328]: DEBUG oslo_concurrency.lockutils [req-e25f026f-ac54-4f9e-8cbe-f3865b668012 req-569d64de-a90b-4eef-88f9-3fb831024f9c service nova] Lock "0cf68559-5f07-4006-9f7f-59027e31635d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1169.284190] env[69328]: DEBUG nova.compute.manager [req-e25f026f-ac54-4f9e-8cbe-f3865b668012 req-569d64de-a90b-4eef-88f9-3fb831024f9c service nova] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] No waiting events found dispatching network-vif-plugged-d76b0cd3-fa46-430c-b29d-7439c7857ba3 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1169.284352] env[69328]: WARNING nova.compute.manager [req-e25f026f-ac54-4f9e-8cbe-f3865b668012 req-569d64de-a90b-4eef-88f9-3fb831024f9c service nova] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Received unexpected event network-vif-plugged-d76b0cd3-fa46-430c-b29d-7439c7857ba3 for instance with vm_state building and task_state spawning. 
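Editor's note: the req-e25f026f entries show a neutron network-vif-plugged event arriving before the spawning thread has registered a waiter for it, so pop_instance_event finds nothing and the manager logs the event as unexpected. Below is a toy sketch of that register-then-dispatch idea using plain threading primitives; the class and method names are illustrative stand-ins and do not mirror Nova's actual InstanceEvents code.

import threading
from collections import defaultdict

class InstanceEventWaiter:
    """Toy per-instance event registry: the spawn path registers interest
    in an event name, the network-event handler pops and signals it."""

    def __init__(self):
        self._lock = threading.Lock()
        self._events = defaultdict(dict)   # instance_uuid -> {event_name: Event}

    def prepare(self, instance_uuid, name):
        ev = threading.Event()
        with self._lock:
            self._events[instance_uuid][name] = ev
        return ev

    def pop_and_signal(self, instance_uuid, name):
        with self._lock:
            ev = self._events[instance_uuid].pop(name, None)
        if ev is None:
            # Corresponds to the "No waiting events found dispatching ..."
            # and "Received unexpected event ..." log lines above.
            print(f"unexpected event {name} for {instance_uuid}")
            return False
        ev.set()
        return True

# Usage idea: the spawning thread calls prepare() before plugging the VIF and
# waits on the returned Event; the handler for the neutron notification calls
# pop_and_signal() when network-vif-plugged arrives.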
[ 1169.286214] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1169.380006] env[69328]: DEBUG nova.scheduler.client.report [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1169.415599] env[69328]: DEBUG nova.compute.manager [None req-616dd6ec-6084-4db0-b933-142e7552947e tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1169.416667] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ffddc150-b8c4-4a92-b645-6964d4929efc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.430237] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b943f3f1-d8f9-47c9-b6d7-6dfc1a9cfecc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.473255] env[69328]: WARNING nova.virt.vmwareapi.driver [None req-616dd6ec-6084-4db0-b933-142e7552947e tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732 could not be found. 
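Editor's note: the "Running periodic task ComputeManager._cleanup_expired_console_auth_tokens" entry is emitted by oslo.service's periodic task runner (oslo_service/periodic_task.py in the path above). The sketch below shows, as I understand the oslo.service interface, how such a task is declared with the periodic_task decorator and driven via run_periodic_tasks; treat the exact signatures as assumptions rather than a verified reference.

from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF

class DemoManager(periodic_task.PeriodicTasks):
    """Toy manager declaring one periodic task, in the style of the
    ComputeManager cleanup jobs seen in this log."""

    def __init__(self):
        super().__init__(CONF)

    @periodic_task.periodic_task(spacing=60)
    def _cleanup_something(self, context):
        # Runs roughly every `spacing` seconds once run_periodic_tasks()
        # is driven by the service's looping call.
        print("periodic cleanup tick")

# Driving loop (normally handled by oslo.service itself):
# mgr = DemoManager(); mgr.run_periodic_tasks(context=None)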
[ 1169.473405] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-616dd6ec-6084-4db0-b933-142e7552947e tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1169.473859] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-162e7038-6e8e-4a18-a475-b987d5da1eb7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.484150] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbdbebc2-3942-44be-a7cd-2d5d63f0da61 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.525835] env[69328]: WARNING nova.virt.vmwareapi.vmops [None req-616dd6ec-6084-4db0-b933-142e7552947e tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732 could not be found. [ 1169.526148] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-616dd6ec-6084-4db0-b933-142e7552947e tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1169.526392] env[69328]: INFO nova.compute.manager [None req-616dd6ec-6084-4db0-b933-142e7552947e tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Took 0.11 seconds to destroy the instance on the hypervisor. [ 1169.526685] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-616dd6ec-6084-4db0-b933-142e7552947e tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1169.526995] env[69328]: DEBUG nova.compute.manager [-] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1169.527126] env[69328]: DEBUG nova.network.neutron [-] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1169.648518] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8e8b6830-34ff-478c-bb62-adb92ec276b6 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Lock "53eb70f0-1734-4386-b747-014561ba577b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.687s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1169.675120] env[69328]: DEBUG oslo_vmware.api [None req-1efcf580-115c-4fbd-aaae-e7bab4ac1d0f tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3274217, 'name': PowerOffVM_Task, 'duration_secs': 0.223457} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.675453] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-1efcf580-115c-4fbd-aaae-e7bab4ac1d0f tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1169.675675] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-1efcf580-115c-4fbd-aaae-e7bab4ac1d0f tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1169.675950] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0735cc63-6f3f-4356-884f-4a6bd972176c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.747063] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-1efcf580-115c-4fbd-aaae-e7bab4ac1d0f tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1169.747422] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-1efcf580-115c-4fbd-aaae-e7bab4ac1d0f tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1169.747912] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-1efcf580-115c-4fbd-aaae-e7bab4ac1d0f tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Deleting the datastore file [datastore2] 275ef1ed-8e60-4151-b548-e22e5bd8efe2 {{(pid=69328) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1169.748264] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-62f1a24a-efee-42b9-871f-28d4ce8ff347 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.758133] env[69328]: DEBUG oslo_vmware.api [None req-1efcf580-115c-4fbd-aaae-e7bab4ac1d0f tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Waiting for the task: (returnval){ [ 1169.758133] env[69328]: value = "task-3274220" [ 1169.758133] env[69328]: _type = "Task" [ 1169.758133] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.766343] env[69328]: DEBUG oslo_vmware.api [None req-1efcf580-115c-4fbd-aaae-e7bab4ac1d0f tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3274220, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.781961] env[69328]: DEBUG nova.network.neutron [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Successfully updated port: d76b0cd3-fa46-430c-b29d-7439c7857ba3 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1169.888115] env[69328]: DEBUG oslo_concurrency.lockutils [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.315s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1169.889115] env[69328]: INFO nova.compute.manager [None req-acb87243-1b67-470f-a1d7-fdfc59947a9a tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Successfully reverted task state from rebuilding on failure for instance. [ 1170.001504] env[69328]: INFO nova.compute.manager [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Rebuilding instance [ 1170.055592] env[69328]: DEBUG nova.compute.manager [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1170.056525] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b6c4016-e9a1-4b23-9a79-230ee827f97e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.123886] env[69328]: INFO nova.network.neutron [None req-019042f2-f809-46a7-bb40-396a5cc0cfe7 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Port 571eed05-9f96-46fe-9592-59e38c00196c from network info_cache is no longer associated with instance in Neutron. 
Removing from network info_cache. [ 1170.124204] env[69328]: DEBUG nova.network.neutron [None req-019042f2-f809-46a7-bb40-396a5cc0cfe7 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Updating instance_info_cache with network_info: [{"id": "13436ecc-0cb3-4c13-bf18-f81195196ffd", "address": "fa:16:3e:2e:1b:14", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13436ecc-0c", "ovs_interfaceid": "13436ecc-0cb3-4c13-bf18-f81195196ffd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1170.278633] env[69328]: DEBUG oslo_vmware.api [None req-1efcf580-115c-4fbd-aaae-e7bab4ac1d0f tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Task: {'id': task-3274220, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143469} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.279497] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-1efcf580-115c-4fbd-aaae-e7bab4ac1d0f tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1170.279497] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-1efcf580-115c-4fbd-aaae-e7bab4ac1d0f tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1170.279497] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-1efcf580-115c-4fbd-aaae-e7bab4ac1d0f tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1170.279497] env[69328]: INFO nova.compute.manager [None req-1efcf580-115c-4fbd-aaae-e7bab4ac1d0f tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Took 1.14 seconds to destroy the instance on the hypervisor. 
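The instance_info_cache entries being logged in this section are serialized lists of VIF dictionaries: each element carries the port id, MAC address, the network with its subnets, fixed and floating IPs, and the port-binding details. A minimal sketch, in plain Python, of walking such a list once it has been deserialized (the `network_info` argument and `summarize_vifs` helper are illustrative names, not Nova code):

    # Sketch: walk a deserialized instance_info_cache entry (a list of VIF
    # dicts shaped like the ones logged above) and collect the addresses.
    def summarize_vifs(network_info):
        summary = []
        for vif in network_info:
            fixed, floating = [], []
            for subnet in vif['network']['subnets']:
                for ip in subnet['ips']:
                    fixed.append(ip['address'])
                    floating.extend(f['address'] for f in ip.get('floating_ips', []))
            summary.append({'port_id': vif['id'],
                            'mac': vif['address'],
                            'fixed_ips': fixed,
                            'floating_ips': floating})
        return summary

    # For the cache entry logged above this yields port
    # 13436ecc-0cb3-4c13-bf18-f81195196ffd with fixed IP 192.168.128.13
    # and floating IP 10.180.180.216.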
[ 1170.279851] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1efcf580-115c-4fbd-aaae-e7bab4ac1d0f tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1170.280291] env[69328]: DEBUG nova.compute.manager [-] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1170.280521] env[69328]: DEBUG nova.network.neutron [-] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1170.286577] env[69328]: DEBUG oslo_concurrency.lockutils [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "refresh_cache-0cf68559-5f07-4006-9f7f-59027e31635d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1170.286577] env[69328]: DEBUG oslo_concurrency.lockutils [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquired lock "refresh_cache-0cf68559-5f07-4006-9f7f-59027e31635d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1170.286577] env[69328]: DEBUG nova.network.neutron [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1170.441966] env[69328]: DEBUG oslo_concurrency.lockutils [None req-950fb797-d42a-4da3-923d-c9b6a0f13ea5 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "f1be93b2-08db-41fe-87c4-f4e5f964cfa4" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1170.442269] env[69328]: DEBUG oslo_concurrency.lockutils [None req-950fb797-d42a-4da3-923d-c9b6a0f13ea5 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "f1be93b2-08db-41fe-87c4-f4e5f964cfa4" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1170.442462] env[69328]: DEBUG nova.compute.manager [None req-950fb797-d42a-4da3-923d-c9b6a0f13ea5 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Going to confirm migration 7 {{(pid=69328) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1170.516720] env[69328]: DEBUG nova.compute.manager [req-63e8a0bf-2267-4081-9938-3c21b70e8128 req-3875ed1d-ca5f-4d08-a5ae-22f5a1157474 service nova] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Received event 
network-vif-deleted-f52daaa5-48f6-4553-ac25-4a0103a7736f {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1170.517184] env[69328]: INFO nova.compute.manager [req-63e8a0bf-2267-4081-9938-3c21b70e8128 req-3875ed1d-ca5f-4d08-a5ae-22f5a1157474 service nova] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Neutron deleted interface f52daaa5-48f6-4553-ac25-4a0103a7736f; detaching it from the instance and deleting it from the info cache [ 1170.517286] env[69328]: DEBUG nova.network.neutron [req-63e8a0bf-2267-4081-9938-3c21b70e8128 req-3875ed1d-ca5f-4d08-a5ae-22f5a1157474 service nova] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1170.626646] env[69328]: DEBUG oslo_concurrency.lockutils [None req-019042f2-f809-46a7-bb40-396a5cc0cfe7 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Releasing lock "refresh_cache-de8e6616-0460-4a6e-918c-a27818da96e2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1170.832064] env[69328]: DEBUG nova.network.neutron [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1170.990329] env[69328]: DEBUG nova.network.neutron [-] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1171.003256] env[69328]: DEBUG oslo_concurrency.lockutils [None req-950fb797-d42a-4da3-923d-c9b6a0f13ea5 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "refresh_cache-f1be93b2-08db-41fe-87c4-f4e5f964cfa4" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1171.003452] env[69328]: DEBUG oslo_concurrency.lockutils [None req-950fb797-d42a-4da3-923d-c9b6a0f13ea5 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquired lock "refresh_cache-f1be93b2-08db-41fe-87c4-f4e5f964cfa4" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1171.003640] env[69328]: DEBUG nova.network.neutron [None req-950fb797-d42a-4da3-923d-c9b6a0f13ea5 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1171.003836] env[69328]: DEBUG nova.objects.instance [None req-950fb797-d42a-4da3-923d-c9b6a0f13ea5 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lazy-loading 'info_cache' on Instance uuid f1be93b2-08db-41fe-87c4-f4e5f964cfa4 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1171.019489] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d5b1c633-ef01-45fb-9f49-0210594474d4 {{(pid=69328) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.033181] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28753e22-42ef-48e0-97cd-a060de2d43b0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.049238] env[69328]: DEBUG oslo_concurrency.lockutils [None req-964d019d-cedd-4e66-944c-bca5419fa197 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "interface-33583ef3-252c-45d4-a514-5646f98c5f45-571eed05-9f96-46fe-9592-59e38c00196c" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1171.049488] env[69328]: DEBUG oslo_concurrency.lockutils [None req-964d019d-cedd-4e66-944c-bca5419fa197 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "interface-33583ef3-252c-45d4-a514-5646f98c5f45-571eed05-9f96-46fe-9592-59e38c00196c" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1171.049838] env[69328]: DEBUG nova.objects.instance [None req-964d019d-cedd-4e66-944c-bca5419fa197 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lazy-loading 'flavor' on Instance uuid 33583ef3-252c-45d4-a514-5646f98c5f45 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1171.077917] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1171.078037] env[69328]: DEBUG nova.compute.manager [req-63e8a0bf-2267-4081-9938-3c21b70e8128 req-3875ed1d-ca5f-4d08-a5ae-22f5a1157474 service nova] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Detach interface failed, port_id=f52daaa5-48f6-4553-ac25-4a0103a7736f, reason: Instance 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732 could not be found. 
{{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1171.079253] env[69328]: DEBUG nova.network.neutron [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Updating instance_info_cache with network_info: [{"id": "d76b0cd3-fa46-430c-b29d-7439c7857ba3", "address": "fa:16:3e:0f:95:97", "network": {"id": "238bfce5-9117-4d8e-8dd7-445d8d894599", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-231958017-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8bbb75992830459c85c818e850261c61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3d7e184-c87f-47a5-8d0d-9fa20e07e669", "external-id": "nsx-vlan-transportzone-746", "segmentation_id": 746, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd76b0cd3-fa", "ovs_interfaceid": "d76b0cd3-fa46-430c-b29d-7439c7857ba3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1171.080779] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3455a854-faf6-4a7b-8156-2398377dc6bf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.090031] env[69328]: DEBUG oslo_vmware.api [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Waiting for the task: (returnval){ [ 1171.090031] env[69328]: value = "task-3274221" [ 1171.090031] env[69328]: _type = "Task" [ 1171.090031] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.101009] env[69328]: DEBUG oslo_vmware.api [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274221, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.131216] env[69328]: DEBUG oslo_concurrency.lockutils [None req-019042f2-f809-46a7-bb40-396a5cc0cfe7 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "interface-de8e6616-0460-4a6e-918c-a27818da96e2-571eed05-9f96-46fe-9592-59e38c00196c" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.967s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1171.349329] env[69328]: DEBUG nova.compute.manager [req-63ac6649-3f0c-4e8c-ab97-cd7e4ec0fc03 req-07a135fb-b0b8-4844-9f92-0778a34755b3 service nova] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Received event network-changed-d76b0cd3-fa46-430c-b29d-7439c7857ba3 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1171.349329] env[69328]: DEBUG nova.compute.manager [req-63ac6649-3f0c-4e8c-ab97-cd7e4ec0fc03 req-07a135fb-b0b8-4844-9f92-0778a34755b3 service nova] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Refreshing instance network info cache due to event network-changed-d76b0cd3-fa46-430c-b29d-7439c7857ba3. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1171.349329] env[69328]: DEBUG oslo_concurrency.lockutils [req-63ac6649-3f0c-4e8c-ab97-cd7e4ec0fc03 req-07a135fb-b0b8-4844-9f92-0778a34755b3 service nova] Acquiring lock "refresh_cache-0cf68559-5f07-4006-9f7f-59027e31635d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1171.440034] env[69328]: DEBUG nova.network.neutron [-] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1171.495615] env[69328]: INFO nova.compute.manager [-] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Took 1.97 seconds to deallocate network for instance. 
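The recurring "Acquiring lock ... by ...", "acquired ... waited N.NNNs", and "released ... held N.NNNs" messages in this section come from oslo.concurrency's lockutils wrapper, which times how long a caller waited for and then held a named lock (for example "compute_resources", "refresh_cache-<uuid>", or the per-interface locks above). A minimal sketch of the two usual usage forms, assuming only oslo.concurrency is available; the function names and lock names are illustrative, not Nova's:

    # Sketch: the two common oslo.concurrency locking forms whose timing
    # messages ("waited X.XXXs" / "held X.XXXs") appear in the log above.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')      # decorator form
    def update_usage():
        pass  # body runs with the named lock held

    def refresh_cache(instance_uuid):
        # context-manager form, mirroring the "refresh_cache-<uuid>" locks
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # rebuild the network info cache here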
[ 1171.584141] env[69328]: DEBUG oslo_concurrency.lockutils [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Releasing lock "refresh_cache-0cf68559-5f07-4006-9f7f-59027e31635d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1171.584141] env[69328]: DEBUG nova.compute.manager [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Instance network_info: |[{"id": "d76b0cd3-fa46-430c-b29d-7439c7857ba3", "address": "fa:16:3e:0f:95:97", "network": {"id": "238bfce5-9117-4d8e-8dd7-445d8d894599", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-231958017-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8bbb75992830459c85c818e850261c61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3d7e184-c87f-47a5-8d0d-9fa20e07e669", "external-id": "nsx-vlan-transportzone-746", "segmentation_id": 746, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd76b0cd3-fa", "ovs_interfaceid": "d76b0cd3-fa46-430c-b29d-7439c7857ba3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1171.584312] env[69328]: DEBUG oslo_concurrency.lockutils [req-63ac6649-3f0c-4e8c-ab97-cd7e4ec0fc03 req-07a135fb-b0b8-4844-9f92-0778a34755b3 service nova] Acquired lock "refresh_cache-0cf68559-5f07-4006-9f7f-59027e31635d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1171.584356] env[69328]: DEBUG nova.network.neutron [req-63ac6649-3f0c-4e8c-ab97-cd7e4ec0fc03 req-07a135fb-b0b8-4844-9f92-0778a34755b3 service nova] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Refreshing network info cache for port d76b0cd3-fa46-430c-b29d-7439c7857ba3 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1171.586186] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0f:95:97', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f3d7e184-c87f-47a5-8d0d-9fa20e07e669', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd76b0cd3-fa46-430c-b29d-7439c7857ba3', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1171.595603] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Creating folder: Project (8bbb75992830459c85c818e850261c61). 
Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1171.600754] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cc497c5c-f3be-414b-9d72-f47b5b10f116 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.617349] env[69328]: DEBUG oslo_vmware.api [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274221, 'name': PowerOffVM_Task, 'duration_secs': 0.323586} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.617584] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1171.617824] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1171.618680] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e40fde31-9972-4ff4-9ade-be1593ed9a22 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.623424] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Created folder: Project (8bbb75992830459c85c818e850261c61) in parent group-v653649. [ 1171.623603] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Creating folder: Instances. Parent ref: group-v653961. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1171.624240] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-583da669-d83a-4050-9f40-6a9031b7941f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.628798] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1171.629175] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f2133c0d-1bc8-45f5-98de-efb4e7d55b37 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.635751] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Created folder: Instances in parent group-v653961. 
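The PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task and CreateVM_Task exchanges in this section all follow the same oslo.vmware pattern: invoke a vSphere method through the session, get back a Task managed object where applicable, and poll it until the "completed successfully" line is logged. A minimal sketch of that pattern, assuming an already-constructed oslo_vmware.api.VMwareAPISession and an existing VirtualMachine managed-object reference (both obtained elsewhere); this is illustrative and not the driver's actual code:

    # Sketch: the invoke-then-poll pattern behind the *_Task entries above.
    # `session` is assumed to be an existing oslo_vmware.api.VMwareAPISession,
    # `vm_ref` an existing VirtualMachine managed-object reference.
    def power_off_and_unregister(session, vm_ref):
        # PowerOffVM_Task returns a Task moref; wait_for_task polls it
        # (the "Task: {'id': task-...} progress is 0%" lines) until done.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)

        # UnregisterVM is synchronous: no task to poll, it simply returns.
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)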
[ 1171.636084] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1171.636267] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1171.636502] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e77a6b09-f511-44a5-9880-399a8ece61f9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.659741] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1171.659959] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1171.660158] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Deleting the datastore file [datastore2] 53eb70f0-1734-4386-b747-014561ba577b {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1171.660407] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d3c5dc80-17b9-4bde-9e3a-3e9b1dc112e9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.663314] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1171.663314] env[69328]: value = "task-3274225" [ 1171.663314] env[69328]: _type = "Task" [ 1171.663314] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.668234] env[69328]: DEBUG oslo_vmware.api [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Waiting for the task: (returnval){ [ 1171.668234] env[69328]: value = "task-3274226" [ 1171.668234] env[69328]: _type = "Task" [ 1171.668234] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.678529] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274225, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.684428] env[69328]: DEBUG oslo_vmware.api [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274226, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.740600] env[69328]: DEBUG nova.objects.instance [None req-964d019d-cedd-4e66-944c-bca5419fa197 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lazy-loading 'pci_requests' on Instance uuid 33583ef3-252c-45d4-a514-5646f98c5f45 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1171.860445] env[69328]: DEBUG nova.network.neutron [req-63ac6649-3f0c-4e8c-ab97-cd7e4ec0fc03 req-07a135fb-b0b8-4844-9f92-0778a34755b3 service nova] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Updated VIF entry in instance network info cache for port d76b0cd3-fa46-430c-b29d-7439c7857ba3. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1171.860932] env[69328]: DEBUG nova.network.neutron [req-63ac6649-3f0c-4e8c-ab97-cd7e4ec0fc03 req-07a135fb-b0b8-4844-9f92-0778a34755b3 service nova] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Updating instance_info_cache with network_info: [{"id": "d76b0cd3-fa46-430c-b29d-7439c7857ba3", "address": "fa:16:3e:0f:95:97", "network": {"id": "238bfce5-9117-4d8e-8dd7-445d8d894599", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-231958017-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8bbb75992830459c85c818e850261c61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3d7e184-c87f-47a5-8d0d-9fa20e07e669", "external-id": "nsx-vlan-transportzone-746", "segmentation_id": 746, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd76b0cd3-fa", "ovs_interfaceid": "d76b0cd3-fa46-430c-b29d-7439c7857ba3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1171.943553] env[69328]: INFO nova.compute.manager [-] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Took 1.66 seconds to deallocate network for instance. [ 1172.048790] env[69328]: INFO nova.compute.manager [None req-616dd6ec-6084-4db0-b933-142e7552947e tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Took 0.55 seconds to detach 1 volumes for instance. [ 1172.051160] env[69328]: DEBUG nova.compute.manager [None req-616dd6ec-6084-4db0-b933-142e7552947e tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Deleting volume: 10778dba-1e87-4365-bb28-98360345c613 {{(pid=69328) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1172.180944] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274225, 'name': CreateVM_Task, 'duration_secs': 0.362787} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.181226] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1172.182030] env[69328]: DEBUG oslo_concurrency.lockutils [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1172.182030] env[69328]: DEBUG oslo_concurrency.lockutils [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1172.182516] env[69328]: DEBUG oslo_concurrency.lockutils [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1172.183680] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7de08de-5a7a-460d-842d-602e1d0d19d6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.187281] env[69328]: DEBUG oslo_vmware.api [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274226, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.122876} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.187852] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1172.188070] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1172.188265] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1172.191763] env[69328]: DEBUG oslo_vmware.api [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1172.191763] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]524aef67-d2aa-be7a-556a-a208e4890d81" [ 1172.191763] env[69328]: _type = "Task" [ 1172.191763] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.201665] env[69328]: DEBUG oslo_vmware.api [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]524aef67-d2aa-be7a-556a-a208e4890d81, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.246401] env[69328]: DEBUG nova.objects.base [None req-964d019d-cedd-4e66-944c-bca5419fa197 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Object Instance<33583ef3-252c-45d4-a514-5646f98c5f45> lazy-loaded attributes: flavor,pci_requests {{(pid=69328) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1172.246623] env[69328]: DEBUG nova.network.neutron [None req-964d019d-cedd-4e66-944c-bca5419fa197 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1172.326905] env[69328]: DEBUG nova.network.neutron [None req-950fb797-d42a-4da3-923d-c9b6a0f13ea5 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Updating instance_info_cache with network_info: [{"id": "1018560a-13d7-4d01-8fc4-03d0b9beab90", "address": "fa:16:3e:33:ba:27", "network": {"id": "4031def8-0553-461f-9528-aa338b9d7b2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1834835647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f357b5a9494b4849a83aa934c5d4e26b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "357d2811-e990-4985-9f9e-b158d10d3699", "external-id": "nsx-vlan-transportzone-641", "segmentation_id": 641, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1018560a-13", "ovs_interfaceid": "1018560a-13d7-4d01-8fc4-03d0b9beab90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1172.363761] env[69328]: DEBUG oslo_concurrency.lockutils [req-63ac6649-3f0c-4e8c-ab97-cd7e4ec0fc03 req-07a135fb-b0b8-4844-9f92-0778a34755b3 service nova] Releasing lock "refresh_cache-0cf68559-5f07-4006-9f7f-59027e31635d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1172.363849] env[69328]: DEBUG nova.compute.manager [req-63ac6649-3f0c-4e8c-ab97-cd7e4ec0fc03 req-07a135fb-b0b8-4844-9f92-0778a34755b3 service nova] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Received event network-changed-13436ecc-0cb3-4c13-bf18-f81195196ffd {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1172.364289] env[69328]: DEBUG nova.compute.manager [req-63ac6649-3f0c-4e8c-ab97-cd7e4ec0fc03 req-07a135fb-b0b8-4844-9f92-0778a34755b3 service nova] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Refreshing instance network info cache due to event network-changed-13436ecc-0cb3-4c13-bf18-f81195196ffd. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1172.364289] env[69328]: DEBUG oslo_concurrency.lockutils [req-63ac6649-3f0c-4e8c-ab97-cd7e4ec0fc03 req-07a135fb-b0b8-4844-9f92-0778a34755b3 service nova] Acquiring lock "refresh_cache-de8e6616-0460-4a6e-918c-a27818da96e2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1172.364476] env[69328]: DEBUG oslo_concurrency.lockutils [req-63ac6649-3f0c-4e8c-ab97-cd7e4ec0fc03 req-07a135fb-b0b8-4844-9f92-0778a34755b3 service nova] Acquired lock "refresh_cache-de8e6616-0460-4a6e-918c-a27818da96e2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1172.364514] env[69328]: DEBUG nova.network.neutron [req-63ac6649-3f0c-4e8c-ab97-cd7e4ec0fc03 req-07a135fb-b0b8-4844-9f92-0778a34755b3 service nova] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Refreshing network info cache for port 13436ecc-0cb3-4c13-bf18-f81195196ffd {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1172.369746] env[69328]: DEBUG nova.policy [None req-964d019d-cedd-4e66-944c-bca5419fa197 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '69ca01fd1d0f42b0b05a5426da9753ad', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '30209bc93a4042488f15c73b7e4733d5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 1172.451333] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1efcf580-115c-4fbd-aaae-e7bab4ac1d0f tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1172.451608] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1efcf580-115c-4fbd-aaae-e7bab4ac1d0f tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1172.451820] env[69328]: DEBUG nova.objects.instance [None req-1efcf580-115c-4fbd-aaae-e7bab4ac1d0f tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Lazy-loading 'resources' on Instance uuid 275ef1ed-8e60-4151-b548-e22e5bd8efe2 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1172.547378] env[69328]: DEBUG nova.compute.manager [req-42c50578-1908-43b0-ae74-69930755de6e req-fff529ad-8f95-4740-91a7-a146f681b854 service nova] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Received event network-vif-deleted-4516486f-d6cd-476a-a5ad-3d3fd9191731 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1172.597907] env[69328]: DEBUG oslo_concurrency.lockutils [None req-616dd6ec-6084-4db0-b933-142e7552947e 
tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1172.705988] env[69328]: DEBUG oslo_vmware.api [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]524aef67-d2aa-be7a-556a-a208e4890d81, 'name': SearchDatastore_Task, 'duration_secs': 0.011816} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.706431] env[69328]: DEBUG oslo_concurrency.lockutils [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1172.706691] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1172.706934] env[69328]: DEBUG oslo_concurrency.lockutils [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1172.707127] env[69328]: DEBUG oslo_concurrency.lockutils [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1172.707321] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1172.707591] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ad5a4179-6186-49e5-9b04-9be9c477210d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.718479] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1172.718715] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf 
tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1172.719755] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08127b6b-1872-4f4f-a660-d65ec670efe2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.728280] env[69328]: DEBUG oslo_vmware.api [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1172.728280] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52bc5f2c-f127-658e-8f45-be9ff5174237" [ 1172.728280] env[69328]: _type = "Task" [ 1172.728280] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.739847] env[69328]: DEBUG oslo_vmware.api [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52bc5f2c-f127-658e-8f45-be9ff5174237, 'name': SearchDatastore_Task} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.742824] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47d0743d-6b80-4d78-a0b5-e1f0713b9f72 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.749311] env[69328]: DEBUG oslo_vmware.api [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1172.749311] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52fc7f88-2be0-2d88-c524-dcaa53716541" [ 1172.749311] env[69328]: _type = "Task" [ 1172.749311] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.759554] env[69328]: DEBUG oslo_vmware.api [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52fc7f88-2be0-2d88-c524-dcaa53716541, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.830583] env[69328]: DEBUG oslo_concurrency.lockutils [None req-950fb797-d42a-4da3-923d-c9b6a0f13ea5 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Releasing lock "refresh_cache-f1be93b2-08db-41fe-87c4-f4e5f964cfa4" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1172.830965] env[69328]: DEBUG nova.objects.instance [None req-950fb797-d42a-4da3-923d-c9b6a0f13ea5 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lazy-loading 'migration_context' on Instance uuid f1be93b2-08db-41fe-87c4-f4e5f964cfa4 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1173.070680] env[69328]: DEBUG nova.network.neutron [req-63ac6649-3f0c-4e8c-ab97-cd7e4ec0fc03 req-07a135fb-b0b8-4844-9f92-0778a34755b3 service nova] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Updated VIF entry in instance network info cache for port 13436ecc-0cb3-4c13-bf18-f81195196ffd. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1173.071242] env[69328]: DEBUG nova.network.neutron [req-63ac6649-3f0c-4e8c-ab97-cd7e4ec0fc03 req-07a135fb-b0b8-4844-9f92-0778a34755b3 service nova] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Updating instance_info_cache with network_info: [{"id": "13436ecc-0cb3-4c13-bf18-f81195196ffd", "address": "fa:16:3e:2e:1b:14", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13436ecc-0c", "ovs_interfaceid": "13436ecc-0cb3-4c13-bf18-f81195196ffd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1173.168860] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80e724ec-e40a-4137-8739-10f6d0996fcd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.176856] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5697dfcd-3882-4a40-a7ea-823c934db8d6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.212178] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e63f5d0-2ec2-4d28-8841-bf2f47caa424 {{(pid=69328) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.219965] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8788f8b3-a96b-42e2-a735-ff27231ff76a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.234021] env[69328]: DEBUG nova.compute.provider_tree [None req-1efcf580-115c-4fbd-aaae-e7bab4ac1d0f tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1173.246229] env[69328]: DEBUG nova.virt.hardware [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1173.246457] env[69328]: DEBUG nova.virt.hardware [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1173.246612] env[69328]: DEBUG nova.virt.hardware [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1173.246790] env[69328]: DEBUG nova.virt.hardware [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1173.246941] env[69328]: DEBUG nova.virt.hardware [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1173.247100] env[69328]: DEBUG nova.virt.hardware [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1173.247312] env[69328]: DEBUG nova.virt.hardware [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1173.247472] env[69328]: DEBUG nova.virt.hardware [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1173.247635] env[69328]: DEBUG nova.virt.hardware [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1173.247801] env[69328]: DEBUG nova.virt.hardware [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1173.247975] env[69328]: DEBUG nova.virt.hardware [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1173.248978] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee7c75bf-b0c3-4e6c-82f0-4ca5655692d4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.261661] env[69328]: DEBUG oslo_vmware.api [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52fc7f88-2be0-2d88-c524-dcaa53716541, 'name': SearchDatastore_Task, 'duration_secs': 0.010413} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.263355] env[69328]: DEBUG oslo_concurrency.lockutils [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1173.263634] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 0cf68559-5f07-4006-9f7f-59027e31635d/0cf68559-5f07-4006-9f7f-59027e31635d.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1173.263908] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c07b8cc5-049d-4968-853d-f2d78dcdf975 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.266539] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9eebd5a-c9c1-47b7-b379-e34e94baa29f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.280756] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Instance VIF info [] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1173.286906] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1173.288861] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1173.289199] env[69328]: DEBUG oslo_vmware.api [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1173.289199] env[69328]: value = "task-3274228" [ 1173.289199] env[69328]: _type = "Task" [ 1173.289199] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.289387] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-95560699-df60-4ffc-9f59-d73cb794eec9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.309528] env[69328]: DEBUG oslo_vmware.api [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274228, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.310676] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1173.310676] env[69328]: value = "task-3274229" [ 1173.310676] env[69328]: _type = "Task" [ 1173.310676] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.318123] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274229, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.334526] env[69328]: DEBUG nova.objects.base [None req-950fb797-d42a-4da3-923d-c9b6a0f13ea5 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=69328) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1173.335669] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eaf757c-637a-4fe9-afb5-2aea20556939 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.359721] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc855185-d11f-40ca-906e-3b1ada375ad0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.366981] env[69328]: DEBUG oslo_vmware.api [None req-950fb797-d42a-4da3-923d-c9b6a0f13ea5 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1173.366981] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a7b032-4756-095c-0083-68b729a915eb" [ 1173.366981] env[69328]: _type = "Task" [ 1173.366981] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.376173] env[69328]: DEBUG oslo_vmware.api [None req-950fb797-d42a-4da3-923d-c9b6a0f13ea5 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a7b032-4756-095c-0083-68b729a915eb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.575190] env[69328]: DEBUG oslo_concurrency.lockutils [req-63ac6649-3f0c-4e8c-ab97-cd7e4ec0fc03 req-07a135fb-b0b8-4844-9f92-0778a34755b3 service nova] Releasing lock "refresh_cache-de8e6616-0460-4a6e-918c-a27818da96e2" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1173.575637] env[69328]: DEBUG nova.compute.manager [req-63ac6649-3f0c-4e8c-ab97-cd7e4ec0fc03 req-07a135fb-b0b8-4844-9f92-0778a34755b3 service nova] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Received event network-changed-509b2377-84e7-48a6-b2ed-811f288cc65c {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1173.575939] env[69328]: DEBUG nova.compute.manager [req-63ac6649-3f0c-4e8c-ab97-cd7e4ec0fc03 req-07a135fb-b0b8-4844-9f92-0778a34755b3 service nova] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Refreshing instance network info cache due to event network-changed-509b2377-84e7-48a6-b2ed-811f288cc65c. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1173.576310] env[69328]: DEBUG oslo_concurrency.lockutils [req-63ac6649-3f0c-4e8c-ab97-cd7e4ec0fc03 req-07a135fb-b0b8-4844-9f92-0778a34755b3 service nova] Acquiring lock "refresh_cache-33583ef3-252c-45d4-a514-5646f98c5f45" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1173.576583] env[69328]: DEBUG oslo_concurrency.lockutils [req-63ac6649-3f0c-4e8c-ab97-cd7e4ec0fc03 req-07a135fb-b0b8-4844-9f92-0778a34755b3 service nova] Acquired lock "refresh_cache-33583ef3-252c-45d4-a514-5646f98c5f45" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1173.576864] env[69328]: DEBUG nova.network.neutron [req-63ac6649-3f0c-4e8c-ab97-cd7e4ec0fc03 req-07a135fb-b0b8-4844-9f92-0778a34755b3 service nova] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Refreshing network info cache for port 509b2377-84e7-48a6-b2ed-811f288cc65c {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1173.740551] env[69328]: DEBUG nova.scheduler.client.report [None req-1efcf580-115c-4fbd-aaae-e7bab4ac1d0f tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1173.811923] env[69328]: DEBUG oslo_vmware.api [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274228, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.460614} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.815718] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 0cf68559-5f07-4006-9f7f-59027e31635d/0cf68559-5f07-4006-9f7f-59027e31635d.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1173.815879] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1173.816441] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0aa82927-8231-41bb-a51d-ac1f15e96c07 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.823462] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274229, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.824764] env[69328]: DEBUG oslo_vmware.api [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1173.824764] env[69328]: value = "task-3274230" [ 1173.824764] env[69328]: _type = "Task" [ 1173.824764] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.833313] env[69328]: DEBUG oslo_vmware.api [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274230, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.877781] env[69328]: DEBUG oslo_vmware.api [None req-950fb797-d42a-4da3-923d-c9b6a0f13ea5 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a7b032-4756-095c-0083-68b729a915eb, 'name': SearchDatastore_Task, 'duration_secs': 0.009491} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.878255] env[69328]: DEBUG oslo_concurrency.lockutils [None req-950fb797-d42a-4da3-923d-c9b6a0f13ea5 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1173.966911] env[69328]: DEBUG nova.network.neutron [None req-964d019d-cedd-4e66-944c-bca5419fa197 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Successfully updated port: 571eed05-9f96-46fe-9592-59e38c00196c {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1174.201093] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquiring lock "1413dcfe-3570-4657-b811-81a1acc159d1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1174.203545] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lock "1413dcfe-3570-4657-b811-81a1acc159d1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1174.203545] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquiring lock "1413dcfe-3570-4657-b811-81a1acc159d1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1174.203545] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lock "1413dcfe-3570-4657-b811-81a1acc159d1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1174.203545] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lock "1413dcfe-3570-4657-b811-81a1acc159d1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1174.209057] env[69328]: INFO nova.compute.manager [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] 
Terminating instance [ 1174.247150] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1efcf580-115c-4fbd-aaae-e7bab4ac1d0f tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.795s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1174.250224] env[69328]: DEBUG oslo_concurrency.lockutils [None req-616dd6ec-6084-4db0-b933-142e7552947e tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.652s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1174.250483] env[69328]: DEBUG nova.objects.instance [None req-616dd6ec-6084-4db0-b933-142e7552947e tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Lazy-loading 'resources' on Instance uuid 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1174.274236] env[69328]: INFO nova.scheduler.client.report [None req-1efcf580-115c-4fbd-aaae-e7bab4ac1d0f tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Deleted allocations for instance 275ef1ed-8e60-4151-b548-e22e5bd8efe2 [ 1174.325589] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274229, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.334601] env[69328]: DEBUG oslo_vmware.api [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274230, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07001} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.334901] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1174.336361] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca1071f4-45cf-4137-9790-00fddfe9db82 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.365514] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] 0cf68559-5f07-4006-9f7f-59027e31635d/0cf68559-5f07-4006-9f7f-59027e31635d.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1174.366897] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4fee958e-340f-4a8b-82e6-80d06b81a6ef {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.383531] env[69328]: DEBUG nova.network.neutron [req-63ac6649-3f0c-4e8c-ab97-cd7e4ec0fc03 req-07a135fb-b0b8-4844-9f92-0778a34755b3 service nova] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Updated VIF entry in instance network info cache for port 509b2377-84e7-48a6-b2ed-811f288cc65c. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1174.383899] env[69328]: DEBUG nova.network.neutron [req-63ac6649-3f0c-4e8c-ab97-cd7e4ec0fc03 req-07a135fb-b0b8-4844-9f92-0778a34755b3 service nova] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Updating instance_info_cache with network_info: [{"id": "509b2377-84e7-48a6-b2ed-811f288cc65c", "address": "fa:16:3e:f8:6d:5c", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap509b2377-84", "ovs_interfaceid": "509b2377-84e7-48a6-b2ed-811f288cc65c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1174.396274] env[69328]: DEBUG oslo_vmware.api [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1174.396274] env[69328]: value = "task-3274231" [ 1174.396274] env[69328]: _type = "Task" [ 1174.396274] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.414071] env[69328]: DEBUG oslo_vmware.api [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274231, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.469962] env[69328]: DEBUG oslo_concurrency.lockutils [None req-964d019d-cedd-4e66-944c-bca5419fa197 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "refresh_cache-33583ef3-252c-45d4-a514-5646f98c5f45" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1174.715074] env[69328]: DEBUG nova.compute.manager [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1174.715333] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1174.716254] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7fed49f-af3f-4938-b1d9-f8d1465f3e79 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.727149] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1174.731043] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a5dace40-0715-42b4-b34b-eb70cb2322e0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.735131] env[69328]: DEBUG nova.compute.manager [req-56b6bce7-a122-4d35-bc1c-4d876a98aa74 req-772b5bfd-0de1-43ff-a97b-17c8c0dad458 service nova] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Received event network-vif-plugged-571eed05-9f96-46fe-9592-59e38c00196c {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1174.735360] env[69328]: DEBUG oslo_concurrency.lockutils [req-56b6bce7-a122-4d35-bc1c-4d876a98aa74 req-772b5bfd-0de1-43ff-a97b-17c8c0dad458 service nova] Acquiring lock "33583ef3-252c-45d4-a514-5646f98c5f45-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1174.736027] env[69328]: DEBUG oslo_concurrency.lockutils [req-56b6bce7-a122-4d35-bc1c-4d876a98aa74 req-772b5bfd-0de1-43ff-a97b-17c8c0dad458 service nova] Lock "33583ef3-252c-45d4-a514-5646f98c5f45-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1174.736863] env[69328]: DEBUG oslo_concurrency.lockutils [req-56b6bce7-a122-4d35-bc1c-4d876a98aa74 req-772b5bfd-0de1-43ff-a97b-17c8c0dad458 service nova] Lock "33583ef3-252c-45d4-a514-5646f98c5f45-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1174.736863] env[69328]: DEBUG nova.compute.manager [req-56b6bce7-a122-4d35-bc1c-4d876a98aa74 req-772b5bfd-0de1-43ff-a97b-17c8c0dad458 service nova] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] No waiting events found dispatching network-vif-plugged-571eed05-9f96-46fe-9592-59e38c00196c {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1174.736863] env[69328]: WARNING nova.compute.manager [req-56b6bce7-a122-4d35-bc1c-4d876a98aa74 req-772b5bfd-0de1-43ff-a97b-17c8c0dad458 service nova] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Received unexpected event 
network-vif-plugged-571eed05-9f96-46fe-9592-59e38c00196c for instance with vm_state active and task_state None. [ 1174.737102] env[69328]: DEBUG nova.compute.manager [req-56b6bce7-a122-4d35-bc1c-4d876a98aa74 req-772b5bfd-0de1-43ff-a97b-17c8c0dad458 service nova] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Received event network-changed-571eed05-9f96-46fe-9592-59e38c00196c {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1174.737310] env[69328]: DEBUG nova.compute.manager [req-56b6bce7-a122-4d35-bc1c-4d876a98aa74 req-772b5bfd-0de1-43ff-a97b-17c8c0dad458 service nova] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Refreshing instance network info cache due to event network-changed-571eed05-9f96-46fe-9592-59e38c00196c. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1174.737531] env[69328]: DEBUG oslo_concurrency.lockutils [req-56b6bce7-a122-4d35-bc1c-4d876a98aa74 req-772b5bfd-0de1-43ff-a97b-17c8c0dad458 service nova] Acquiring lock "refresh_cache-33583ef3-252c-45d4-a514-5646f98c5f45" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1174.744406] env[69328]: DEBUG oslo_vmware.api [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 1174.744406] env[69328]: value = "task-3274232" [ 1174.744406] env[69328]: _type = "Task" [ 1174.744406] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.757594] env[69328]: DEBUG oslo_vmware.api [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274232, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.786026] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1efcf580-115c-4fbd-aaae-e7bab4ac1d0f tempest-VolumesAdminNegativeTest-1018559252 tempest-VolumesAdminNegativeTest-1018559252-project-member] Lock "275ef1ed-8e60-4151-b548-e22e5bd8efe2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.157s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1174.830781] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274229, 'name': CreateVM_Task, 'duration_secs': 1.39518} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.830781] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1174.830781] env[69328]: DEBUG oslo_concurrency.lockutils [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1174.830781] env[69328]: DEBUG oslo_concurrency.lockutils [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1174.830781] env[69328]: DEBUG oslo_concurrency.lockutils [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1174.830781] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8545c2c-daef-49c9-bc34-9e11d98b04fd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.839353] env[69328]: DEBUG oslo_vmware.api [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Waiting for the task: (returnval){ [ 1174.839353] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52edd42d-0859-dfe2-2542-932892e98e56" [ 1174.839353] env[69328]: _type = "Task" [ 1174.839353] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.851268] env[69328]: DEBUG oslo_vmware.api [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52edd42d-0859-dfe2-2542-932892e98e56, 'name': SearchDatastore_Task, 'duration_secs': 0.011497} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.852252] env[69328]: DEBUG oslo_concurrency.lockutils [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1174.852530] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1174.852750] env[69328]: DEBUG oslo_concurrency.lockutils [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1174.852902] env[69328]: DEBUG oslo_concurrency.lockutils [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1174.853096] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1174.853369] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3b7cfc0f-15f9-4b7d-87d4-c7682bad44d9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.871937] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1174.871937] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1174.871937] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c969c5cc-9e4e-43d4-8346-97dedc7cd854 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.883745] env[69328]: DEBUG oslo_vmware.api [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Waiting for the task: (returnval){ [ 1174.883745] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52088592-3ad7-6e8e-5801-25f1889fc713" [ 1174.883745] env[69328]: _type = "Task" [ 1174.883745] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.891775] env[69328]: DEBUG oslo_concurrency.lockutils [req-63ac6649-3f0c-4e8c-ab97-cd7e4ec0fc03 req-07a135fb-b0b8-4844-9f92-0778a34755b3 service nova] Releasing lock "refresh_cache-33583ef3-252c-45d4-a514-5646f98c5f45" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1174.895502] env[69328]: DEBUG oslo_concurrency.lockutils [None req-964d019d-cedd-4e66-944c-bca5419fa197 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquired lock "refresh_cache-33583ef3-252c-45d4-a514-5646f98c5f45" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1174.895715] env[69328]: DEBUG nova.network.neutron [None req-964d019d-cedd-4e66-944c-bca5419fa197 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1174.896891] env[69328]: DEBUG oslo_vmware.api [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52088592-3ad7-6e8e-5801-25f1889fc713, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.906374] env[69328]: DEBUG oslo_vmware.api [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274231, 'name': ReconfigVM_Task, 'duration_secs': 0.313308} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.909082] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Reconfigured VM instance instance-00000077 to attach disk [datastore1] 0cf68559-5f07-4006-9f7f-59027e31635d/0cf68559-5f07-4006-9f7f-59027e31635d.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1174.910512] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0f58e813-26ca-4e9a-98c9-d58d80dd3908 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.918516] env[69328]: DEBUG oslo_vmware.api [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1174.918516] env[69328]: value = "task-3274233" [ 1174.918516] env[69328]: _type = "Task" [ 1174.918516] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.935325] env[69328]: DEBUG oslo_vmware.api [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274233, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.016498] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fffc723a-9c61-45fc-bf66-e675a42f2749 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.026899] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af71421d-a3b0-49c6-97cf-26cbbb0625ba {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.064651] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-462eb5cf-e193-4efc-b3ba-f4185bd2357e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.072889] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53e7d12b-85b7-4ae4-ae5b-006dbc815c56 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.087587] env[69328]: DEBUG nova.compute.provider_tree [None req-616dd6ec-6084-4db0-b933-142e7552947e tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1175.259960] env[69328]: DEBUG oslo_vmware.api [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274232, 'name': PowerOffVM_Task, 'duration_secs': 0.324454} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.263110] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1175.263110] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1175.263110] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-00af7b69-9f4f-461d-a55c-2c9dd747e103 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.336482] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1175.336482] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1175.336482] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Deleting the datastore file [datastore1] 1413dcfe-3570-4657-b811-81a1acc159d1 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1175.336482] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3713c1c2-cdf0-44ce-85a2-69adc0b8a9f2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.345387] env[69328]: DEBUG oslo_vmware.api [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 1175.345387] env[69328]: value = "task-3274235" [ 1175.345387] env[69328]: _type = "Task" [ 1175.345387] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.353690] env[69328]: DEBUG oslo_vmware.api [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274235, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.394332] env[69328]: DEBUG oslo_vmware.api [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52088592-3ad7-6e8e-5801-25f1889fc713, 'name': SearchDatastore_Task, 'duration_secs': 0.012223} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.395212] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c5bf68b-40ad-4963-ae58-4f04dd0cbab7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.404043] env[69328]: DEBUG oslo_vmware.api [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Waiting for the task: (returnval){ [ 1175.404043] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]522de9a6-ea0e-db98-cfc8-d58f721f1cfe" [ 1175.404043] env[69328]: _type = "Task" [ 1175.404043] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.413892] env[69328]: DEBUG oslo_vmware.api [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]522de9a6-ea0e-db98-cfc8-d58f721f1cfe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.428432] env[69328]: DEBUG oslo_vmware.api [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274233, 'name': Rename_Task, 'duration_secs': 0.170122} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.428711] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1175.428957] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-31ddba7c-fc61-494f-bd85-8630017bd638 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.436819] env[69328]: DEBUG oslo_vmware.api [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1175.436819] env[69328]: value = "task-3274236" [ 1175.436819] env[69328]: _type = "Task" [ 1175.436819] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.447420] env[69328]: DEBUG oslo_vmware.api [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274236, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.448376] env[69328]: WARNING nova.network.neutron [None req-964d019d-cedd-4e66-944c-bca5419fa197 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] aed15283-4a79-4e99-8b6c-49cf754138de already exists in list: networks containing: ['aed15283-4a79-4e99-8b6c-49cf754138de']. ignoring it [ 1175.611922] env[69328]: ERROR nova.scheduler.client.report [None req-616dd6ec-6084-4db0-b933-142e7552947e tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [req-a4086a61-3428-4a2d-9fca-3765e7070992] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a4086a61-3428-4a2d-9fca-3765e7070992"}]} [ 1175.634810] env[69328]: DEBUG nova.scheduler.client.report [None req-616dd6ec-6084-4db0-b933-142e7552947e tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Refreshing inventories for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1175.649230] env[69328]: DEBUG nova.scheduler.client.report [None req-616dd6ec-6084-4db0-b933-142e7552947e tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Updating ProviderTree inventory for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1175.650493] env[69328]: DEBUG nova.compute.provider_tree [None req-616dd6ec-6084-4db0-b933-142e7552947e tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1175.666673] env[69328]: DEBUG nova.scheduler.client.report [None req-616dd6ec-6084-4db0-b933-142e7552947e tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Refreshing aggregate associations for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e, aggregates: None {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1175.686191] env[69328]: DEBUG nova.scheduler.client.report [None req-616dd6ec-6084-4db0-b933-142e7552947e tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Refreshing trait associations for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1175.861858] env[69328]: DEBUG oslo_vmware.api [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274235, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.208218} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.861858] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1175.861858] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1175.862053] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1175.864857] env[69328]: INFO nova.compute.manager [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1175.864857] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1175.864857] env[69328]: DEBUG nova.compute.manager [-] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1175.864857] env[69328]: DEBUG nova.network.neutron [-] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1175.915122] env[69328]: DEBUG oslo_vmware.api [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]522de9a6-ea0e-db98-cfc8-d58f721f1cfe, 'name': SearchDatastore_Task, 'duration_secs': 0.010744} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.916572] env[69328]: DEBUG oslo_concurrency.lockutils [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1175.916851] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 53eb70f0-1734-4386-b747-014561ba577b/53eb70f0-1734-4386-b747-014561ba577b.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1175.918162] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7447924-dd66-46a9-a433-5887c049c001 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.920595] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0b7c1f3a-1d41-4199-ac86-de627671c481 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.929877] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be1e8be7-0b1b-40a4-9543-28969e42c947 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.933304] env[69328]: DEBUG oslo_vmware.api [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Waiting for the task: (returnval){ [ 1175.933304] env[69328]: value = "task-3274237" [ 1175.933304] env[69328]: _type = "Task" [ 1175.933304] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.968957] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1c544f9-2391-4bc5-8dcb-208f4d99e740 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.976555] env[69328]: DEBUG oslo_vmware.api [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274237, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.981519] env[69328]: DEBUG oslo_vmware.api [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274236, 'name': PowerOnVM_Task, 'duration_secs': 0.505568} completed successfully. 
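Editor's note: the PowerOnVM_Task lines above show the usual vSphere pattern that oslo.vmware's wait_for_task wraps: invoke a *_Task method, then poll the task state until it reaches success or error, logging progress along the way. A self-contained sketch of that poll loop follows; get_task_info is a hypothetical callable standing in for the real property-collector read, and the TaskInfo-like attribute names are assumptions.

import time

def wait_for_task(get_task_info, poll_interval=0.5, timeout=300):
    """Poll a vSphere-style task until it finishes.

    get_task_info() is assumed to return an object with .state
    ('queued', 'running', 'success', 'error'), .progress and .error;
    that shape mirrors vSphere TaskInfo but is only a stand-in here.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            raise RuntimeError(f"task failed: {info.error}")
        # 'queued' / 'running': keep polling, like the repeated
        # "progress is N%" lines in the log.
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete in time")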
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.981947] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1175.982179] env[69328]: INFO nova.compute.manager [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Took 7.26 seconds to spawn the instance on the hypervisor. [ 1175.982391] env[69328]: DEBUG nova.compute.manager [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1175.983656] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c72b80b0-a68d-4b37-8207-290e40feb3dc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.988395] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4815ab66-32be-4d58-b258-d6581eec7b8d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.002707] env[69328]: DEBUG nova.compute.provider_tree [None req-616dd6ec-6084-4db0-b933-142e7552947e tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1176.178036] env[69328]: DEBUG nova.network.neutron [None req-964d019d-cedd-4e66-944c-bca5419fa197 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Updating instance_info_cache with network_info: [{"id": "509b2377-84e7-48a6-b2ed-811f288cc65c", "address": "fa:16:3e:f8:6d:5c", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap509b2377-84", "ovs_interfaceid": "509b2377-84e7-48a6-b2ed-811f288cc65c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "571eed05-9f96-46fe-9592-59e38c00196c", "address": "fa:16:3e:dc:0e:cb", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap571eed05-9f", "ovs_interfaceid": "571eed05-9f96-46fe-9592-59e38c00196c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1176.447264] env[69328]: DEBUG oslo_vmware.api [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274237, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.522217] env[69328]: INFO nova.compute.manager [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Took 16.93 seconds to build instance. [ 1176.538645] env[69328]: ERROR nova.scheduler.client.report [None req-616dd6ec-6084-4db0-b933-142e7552947e tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] [req-e62fe203-589b-458d-8a6c-24600e3fe37e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-e62fe203-589b-458d-8a6c-24600e3fe37e"}]} [ 1176.557156] env[69328]: DEBUG nova.scheduler.client.report [None req-616dd6ec-6084-4db0-b933-142e7552947e tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Refreshing inventories for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1176.577627] env[69328]: DEBUG nova.scheduler.client.report [None req-616dd6ec-6084-4db0-b933-142e7552947e tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Updating ProviderTree inventory for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1176.578802] env[69328]: DEBUG nova.compute.provider_tree [None req-616dd6ec-6084-4db0-b933-142e7552947e tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1176.598707] env[69328]: DEBUG nova.scheduler.client.report [None req-616dd6ec-6084-4db0-b933-142e7552947e tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Refreshing aggregate associations for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e, aggregates: None {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1176.620905] env[69328]: DEBUG nova.scheduler.client.report [None req-616dd6ec-6084-4db0-b933-142e7552947e tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Refreshing trait associations for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1176.680563] env[69328]: DEBUG oslo_concurrency.lockutils [None req-964d019d-cedd-4e66-944c-bca5419fa197 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Releasing lock "refresh_cache-33583ef3-252c-45d4-a514-5646f98c5f45" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1176.681271] env[69328]: DEBUG 
oslo_concurrency.lockutils [None req-964d019d-cedd-4e66-944c-bca5419fa197 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "33583ef3-252c-45d4-a514-5646f98c5f45" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1176.681422] env[69328]: DEBUG oslo_concurrency.lockutils [None req-964d019d-cedd-4e66-944c-bca5419fa197 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquired lock "33583ef3-252c-45d4-a514-5646f98c5f45" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1176.681712] env[69328]: DEBUG oslo_concurrency.lockutils [req-56b6bce7-a122-4d35-bc1c-4d876a98aa74 req-772b5bfd-0de1-43ff-a97b-17c8c0dad458 service nova] Acquired lock "refresh_cache-33583ef3-252c-45d4-a514-5646f98c5f45" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1176.681887] env[69328]: DEBUG nova.network.neutron [req-56b6bce7-a122-4d35-bc1c-4d876a98aa74 req-772b5bfd-0de1-43ff-a97b-17c8c0dad458 service nova] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Refreshing network info cache for port 571eed05-9f96-46fe-9592-59e38c00196c {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1176.688025] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e96fb33-5bf0-4ed5-b029-70b68c2c937b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.704772] env[69328]: DEBUG nova.virt.hardware [None req-964d019d-cedd-4e66-944c-bca5419fa197 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1176.705124] env[69328]: DEBUG nova.virt.hardware [None req-964d019d-cedd-4e66-944c-bca5419fa197 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1176.705262] env[69328]: DEBUG nova.virt.hardware [None req-964d019d-cedd-4e66-944c-bca5419fa197 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1176.705447] env[69328]: DEBUG nova.virt.hardware [None req-964d019d-cedd-4e66-944c-bca5419fa197 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1176.705589] env[69328]: DEBUG nova.virt.hardware [None 
req-964d019d-cedd-4e66-944c-bca5419fa197 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1176.705735] env[69328]: DEBUG nova.virt.hardware [None req-964d019d-cedd-4e66-944c-bca5419fa197 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1176.705944] env[69328]: DEBUG nova.virt.hardware [None req-964d019d-cedd-4e66-944c-bca5419fa197 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1176.706117] env[69328]: DEBUG nova.virt.hardware [None req-964d019d-cedd-4e66-944c-bca5419fa197 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1176.706287] env[69328]: DEBUG nova.virt.hardware [None req-964d019d-cedd-4e66-944c-bca5419fa197 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1176.706450] env[69328]: DEBUG nova.virt.hardware [None req-964d019d-cedd-4e66-944c-bca5419fa197 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1176.706623] env[69328]: DEBUG nova.virt.hardware [None req-964d019d-cedd-4e66-944c-bca5419fa197 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1176.713504] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-964d019d-cedd-4e66-944c-bca5419fa197 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Reconfiguring VM to attach interface {{(pid=69328) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1176.718701] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-58611494-9e8f-444b-a3f5-5a435664eda4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.738232] env[69328]: DEBUG oslo_vmware.api [None req-964d019d-cedd-4e66-944c-bca5419fa197 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 1176.738232] env[69328]: value = "task-3274238" [ 1176.738232] env[69328]: _type = "Task" [ 1176.738232] env[69328]: } to complete. 
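Editor's note: the nova.virt.hardware lines above walk the CPU topology selection for the 1-vCPU m1.nano flavor: with no flavor or image limits the maxima default to 65536 sockets/cores/threads, and the only possible topology for one vCPU is 1:1:1. The toy enumeration below illustrates the same arithmetic; it is not Nova's implementation and ignores preferences and ordering.

from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate (sockets, cores, threads) triples whose product equals
    vcpus within the given maxima -- the space the log's 1:1:1 result
    is drawn from."""
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                topologies.append(VirtCPUTopology(sockets, cores, threads))
    return topologies

print(possible_topologies(1))  # [VirtCPUTopology(sockets=1, cores=1, threads=1)]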
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.748319] env[69328]: DEBUG oslo_vmware.api [None req-964d019d-cedd-4e66-944c-bca5419fa197 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274238, 'name': ReconfigVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.944602] env[69328]: DEBUG oslo_vmware.api [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274237, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.851622} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.946045] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 53eb70f0-1734-4386-b747-014561ba577b/53eb70f0-1734-4386-b747-014561ba577b.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1176.946288] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1176.950879] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a2ac6b19-ff44-4245-a629-7ee8cd9d8c67 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.953199] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Acquiring lock "d19f6a2a-3a16-4031-8c20-143ccfd6f5f5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1176.953419] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lock "d19f6a2a-3a16-4031-8c20-143ccfd6f5f5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1176.955113] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-150dddc3-0f33-4957-8c93-3094767af6b0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.961155] env[69328]: DEBUG nova.compute.manager [req-9e5db71f-a14f-4ad6-ad39-24fbb57f6b32 req-7903fb3d-7053-407d-a137-fcf80b668ebb service nova] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Received event network-vif-deleted-7843ca64-fb43-4866-9bd7-f10b7c7e085e {{(pid=69328) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11737}} [ 1176.961385] env[69328]: INFO nova.compute.manager [req-9e5db71f-a14f-4ad6-ad39-24fbb57f6b32 req-7903fb3d-7053-407d-a137-fcf80b668ebb service nova] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Neutron deleted interface 7843ca64-fb43-4866-9bd7-f10b7c7e085e; detaching it from the instance and deleting it from the info cache [ 1176.961720] env[69328]: DEBUG nova.network.neutron [req-9e5db71f-a14f-4ad6-ad39-24fbb57f6b32 req-7903fb3d-7053-407d-a137-fcf80b668ebb service nova] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1176.968092] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a8a27d1-6f04-4adb-a407-5f45a7923e71 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.973193] env[69328]: DEBUG oslo_vmware.api [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Waiting for the task: (returnval){ [ 1176.973193] env[69328]: value = "task-3274239" [ 1176.973193] env[69328]: _type = "Task" [ 1176.973193] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.013707] env[69328]: DEBUG nova.network.neutron [-] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1177.017714] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-928aba0d-e9a1-4fc4-8b0b-462b1bef58ef {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.022735] env[69328]: DEBUG oslo_vmware.api [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274239, 'name': ExtendVirtualDisk_Task} progress is 50%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.024836] env[69328]: DEBUG oslo_concurrency.lockutils [None req-60853e4f-9ef4-4e3c-a589-a3cd43ad15bf tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "0cf68559-5f07-4006-9f7f-59027e31635d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.440s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1177.029661] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd9a3f6c-3f22-46c5-9818-4c5618e93902 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.045345] env[69328]: DEBUG nova.compute.provider_tree [None req-616dd6ec-6084-4db0-b933-142e7552947e tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1177.254169] env[69328]: DEBUG oslo_vmware.api [None req-964d019d-cedd-4e66-944c-bca5419fa197 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274238, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.459349] env[69328]: DEBUG nova.compute.manager [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1177.468240] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ca90fe72-e672-4d59-9f2b-220d0fec317b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.483189] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5274c67-6384-4b10-b821-43eb01bfb73c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.497024] env[69328]: DEBUG oslo_vmware.api [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274239, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074066} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.498248] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1177.499110] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ba9557f-3ae3-49bc-9b15-6b4267afb27e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.518826] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Reconfiguring VM instance instance-00000076 to attach disk [datastore2] 53eb70f0-1734-4386-b747-014561ba577b/53eb70f0-1734-4386-b747-014561ba577b.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1177.532662] env[69328]: INFO nova.compute.manager [-] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Took 1.67 seconds to deallocate network for instance. [ 1177.532929] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dd6f8027-f04f-46ce-9c6f-bd0cdc64a538 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.549818] env[69328]: DEBUG nova.compute.manager [req-9e5db71f-a14f-4ad6-ad39-24fbb57f6b32 req-7903fb3d-7053-407d-a137-fcf80b668ebb service nova] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Detach interface failed, port_id=7843ca64-fb43-4866-9bd7-f10b7c7e085e, reason: Instance 1413dcfe-3570-4657-b811-81a1acc159d1 could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1177.551287] env[69328]: DEBUG nova.network.neutron [req-56b6bce7-a122-4d35-bc1c-4d876a98aa74 req-772b5bfd-0de1-43ff-a97b-17c8c0dad458 service nova] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Updated VIF entry in instance network info cache for port 571eed05-9f96-46fe-9592-59e38c00196c. 
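Editor's note: the 53eb70f0 instance lines above trace the VMware spawn path: copy the cached image VMDK from devstack-image-cache_base to the instance folder, extend the root disk, then ReconfigVM to attach it. The tiny helpers below only build the "[datastore] folder/file.vmdk" style paths seen in the log; the format is inferred from these entries and the helpers are not Nova's ds_util.

def ds_path(datastore, *parts):
    """Build a '[datastore] a/b/c' style path like the ones in the log."""
    return f"[{datastore}] " + "/".join(parts)

def cached_image_vmdk(datastore, image_id, cache_folder="devstack-image-cache_base"):
    return ds_path(datastore, cache_folder, image_id, f"{image_id}.vmdk")

def instance_root_vmdk(datastore, instance_uuid):
    return ds_path(datastore, instance_uuid, f"{instance_uuid}.vmdk")

src = cached_image_vmdk("datastore2", "a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318")
dst = instance_root_vmdk("datastore2", "53eb70f0-1734-4386-b747-014561ba577b")
# src and dst match the CopyVirtualDisk_Task source and destination logged above.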
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1177.551863] env[69328]: DEBUG nova.network.neutron [req-56b6bce7-a122-4d35-bc1c-4d876a98aa74 req-772b5bfd-0de1-43ff-a97b-17c8c0dad458 service nova] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Updating instance_info_cache with network_info: [{"id": "509b2377-84e7-48a6-b2ed-811f288cc65c", "address": "fa:16:3e:f8:6d:5c", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap509b2377-84", "ovs_interfaceid": "509b2377-84e7-48a6-b2ed-811f288cc65c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "571eed05-9f96-46fe-9592-59e38c00196c", "address": "fa:16:3e:dc:0e:cb", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap571eed05-9f", "ovs_interfaceid": "571eed05-9f96-46fe-9592-59e38c00196c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1177.557466] env[69328]: DEBUG nova.scheduler.client.report [None req-616dd6ec-6084-4db0-b933-142e7552947e tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1177.567942] env[69328]: DEBUG oslo_vmware.api [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Waiting for the task: (returnval){ [ 1177.567942] env[69328]: value = "task-3274240" [ 1177.567942] env[69328]: _type = "Task" [ 1177.567942] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.579979] env[69328]: DEBUG oslo_vmware.api [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274240, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.752556] env[69328]: DEBUG oslo_vmware.api [None req-964d019d-cedd-4e66-944c-bca5419fa197 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274238, 'name': ReconfigVM_Task, 'duration_secs': 0.745073} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.753135] env[69328]: DEBUG oslo_concurrency.lockutils [None req-964d019d-cedd-4e66-944c-bca5419fa197 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Releasing lock "33583ef3-252c-45d4-a514-5646f98c5f45" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1177.753540] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-964d019d-cedd-4e66-944c-bca5419fa197 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Reconfigured VM to attach interface {{(pid=69328) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1177.981407] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1178.061464] env[69328]: DEBUG oslo_concurrency.lockutils [req-56b6bce7-a122-4d35-bc1c-4d876a98aa74 req-772b5bfd-0de1-43ff-a97b-17c8c0dad458 service nova] Releasing lock "refresh_cache-33583ef3-252c-45d4-a514-5646f98c5f45" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1178.062800] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1178.063528] env[69328]: DEBUG oslo_concurrency.lockutils [None req-616dd6ec-6084-4db0-b933-142e7552947e tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.813s {{(pid=69328) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1178.066183] env[69328]: DEBUG oslo_concurrency.lockutils [None req-950fb797-d42a-4da3-923d-c9b6a0f13ea5 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 4.188s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1178.082755] env[69328]: DEBUG oslo_vmware.api [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274240, 'name': ReconfigVM_Task, 'duration_secs': 0.360881} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.082755] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Reconfigured VM instance instance-00000076 to attach disk [datastore2] 53eb70f0-1734-4386-b747-014561ba577b/53eb70f0-1734-4386-b747-014561ba577b.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1178.083309] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-81e73834-4b7f-40aa-bf3e-75bb5d7fde1d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.096289] env[69328]: DEBUG oslo_vmware.api [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Waiting for the task: (returnval){ [ 1178.096289] env[69328]: value = "task-3274241" [ 1178.096289] env[69328]: _type = "Task" [ 1178.096289] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.106640] env[69328]: DEBUG oslo_vmware.api [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274241, 'name': Rename_Task} progress is 5%. 
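Editor's note: the lockutils lines report, for each named lock, how long the caller waited to acquire it and how long it was held ("waited 0.000s", "held 3.813s"). A minimal stand-alone equivalent of that instrumentation is sketched below using only the standard library; it is not oslo.concurrency and does not implement its fair or external locking.

import logging
import threading
import time
from contextlib import contextmanager

LOG = logging.getLogger(__name__)
_locks = {}
_registry_guard = threading.Lock()

@contextmanager
def timed_lock(name, owner):
    """Named lock that logs wait/hold times in the same spirit as the
    'acquired by ... waited' / 'released by ... held' lines above."""
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    LOG.debug('Lock "%s" acquired by "%s" :: waited %.3fs',
              name, owner, time.monotonic() - t0)
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        LOG.debug('Lock "%s" released by "%s" :: held %.3fs',
                  name, owner, time.monotonic() - t1)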
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.265074] env[69328]: DEBUG oslo_concurrency.lockutils [None req-964d019d-cedd-4e66-944c-bca5419fa197 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "interface-33583ef3-252c-45d4-a514-5646f98c5f45-571eed05-9f96-46fe-9592-59e38c00196c" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.213s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1178.607748] env[69328]: DEBUG oslo_concurrency.lockutils [None req-616dd6ec-6084-4db0-b933-142e7552947e tempest-ServerActionsV293TestJSON-277494671 tempest-ServerActionsV293TestJSON-277494671-project-member] Lock "5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.699s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1178.621417] env[69328]: DEBUG oslo_vmware.api [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274241, 'name': Rename_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.631845] env[69328]: DEBUG oslo_concurrency.lockutils [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquiring lock "ae46c18e-15ae-4a47-b05a-a143f10b5ab6" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1178.632181] env[69328]: DEBUG oslo_concurrency.lockutils [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lock "ae46c18e-15ae-4a47-b05a-a143f10b5ab6" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1178.632369] env[69328]: INFO nova.compute.manager [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Shelving [ 1178.844025] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91095607-a1a1-4593-a79b-49b7b6700235 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.852187] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73111fac-b5e0-4b22-90a9-faf7d7a123ca {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.884978] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f554f0a-28f2-4ade-b226-39f0524e380c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.894253] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-abb1e4d7-b3fe-4cd4-b075-ca4c7c143d18 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.910309] env[69328]: DEBUG nova.compute.provider_tree [None req-950fb797-d42a-4da3-923d-c9b6a0f13ea5 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1179.002861] env[69328]: DEBUG nova.compute.manager [req-f0e9f15c-53cd-45cd-8082-0bbcc2efd013 req-40c21735-c568-4150-8897-620bb2f0eaa2 service nova] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Received event network-changed-d76b0cd3-fa46-430c-b29d-7439c7857ba3 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1179.003224] env[69328]: DEBUG nova.compute.manager [req-f0e9f15c-53cd-45cd-8082-0bbcc2efd013 req-40c21735-c568-4150-8897-620bb2f0eaa2 service nova] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Refreshing instance network info cache due to event network-changed-d76b0cd3-fa46-430c-b29d-7439c7857ba3. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1179.003301] env[69328]: DEBUG oslo_concurrency.lockutils [req-f0e9f15c-53cd-45cd-8082-0bbcc2efd013 req-40c21735-c568-4150-8897-620bb2f0eaa2 service nova] Acquiring lock "refresh_cache-0cf68559-5f07-4006-9f7f-59027e31635d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1179.003441] env[69328]: DEBUG oslo_concurrency.lockutils [req-f0e9f15c-53cd-45cd-8082-0bbcc2efd013 req-40c21735-c568-4150-8897-620bb2f0eaa2 service nova] Acquired lock "refresh_cache-0cf68559-5f07-4006-9f7f-59027e31635d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1179.003595] env[69328]: DEBUG nova.network.neutron [req-f0e9f15c-53cd-45cd-8082-0bbcc2efd013 req-40c21735-c568-4150-8897-620bb2f0eaa2 service nova] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Refreshing network info cache for port d76b0cd3-fa46-430c-b29d-7439c7857ba3 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1179.110361] env[69328]: DEBUG oslo_vmware.api [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274241, 'name': Rename_Task} progress is 99%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.414385] env[69328]: DEBUG nova.scheduler.client.report [None req-950fb797-d42a-4da3-923d-c9b6a0f13ea5 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1179.611387] env[69328]: DEBUG oslo_vmware.api [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274241, 'name': Rename_Task, 'duration_secs': 1.175954} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.613832] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1179.614111] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b305e0b5-533c-4fcd-a316-5b7c10f16247 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.621092] env[69328]: DEBUG oslo_vmware.api [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Waiting for the task: (returnval){ [ 1179.621092] env[69328]: value = "task-3274242" [ 1179.621092] env[69328]: _type = "Task" [ 1179.621092] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.628719] env[69328]: DEBUG oslo_vmware.api [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274242, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.641547] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1179.641716] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6c3e7c20-fe79-484a-8938-71aaad0083a7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.649185] env[69328]: DEBUG oslo_vmware.api [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 1179.649185] env[69328]: value = "task-3274243" [ 1179.649185] env[69328]: _type = "Task" [ 1179.649185] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.658378] env[69328]: DEBUG oslo_vmware.api [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274243, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.738015] env[69328]: DEBUG nova.network.neutron [req-f0e9f15c-53cd-45cd-8082-0bbcc2efd013 req-40c21735-c568-4150-8897-620bb2f0eaa2 service nova] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Updated VIF entry in instance network info cache for port d76b0cd3-fa46-430c-b29d-7439c7857ba3. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1179.738480] env[69328]: DEBUG nova.network.neutron [req-f0e9f15c-53cd-45cd-8082-0bbcc2efd013 req-40c21735-c568-4150-8897-620bb2f0eaa2 service nova] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Updating instance_info_cache with network_info: [{"id": "d76b0cd3-fa46-430c-b29d-7439c7857ba3", "address": "fa:16:3e:0f:95:97", "network": {"id": "238bfce5-9117-4d8e-8dd7-445d8d894599", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-231958017-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8bbb75992830459c85c818e850261c61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3d7e184-c87f-47a5-8d0d-9fa20e07e669", "external-id": "nsx-vlan-transportzone-746", "segmentation_id": 746, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd76b0cd3-fa", "ovs_interfaceid": "d76b0cd3-fa46-430c-b29d-7439c7857ba3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1179.882666] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9966fe39-a7d5-4e09-95d5-beefe8e318f8 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "interface-33583ef3-252c-45d4-a514-5646f98c5f45-571eed05-9f96-46fe-9592-59e38c00196c" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1179.882938] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9966fe39-a7d5-4e09-95d5-beefe8e318f8 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "interface-33583ef3-252c-45d4-a514-5646f98c5f45-571eed05-9f96-46fe-9592-59e38c00196c" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1180.131590] env[69328]: DEBUG oslo_vmware.api [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274242, 'name': PowerOnVM_Task, 'duration_secs': 0.452208} completed successfully. 
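Editor's note: the req-f0e9f15c lines above show a Neutron-originated external event ("network-changed-d76b0cd3-...") being routed to a handler that re-reads that port's network info under the instance's refresh_cache lock and updates the cached VIF entry. The condensed dispatch sketch below follows that shape; refresh_port_info, the module-level caches, and the lock registry are hypothetical stand-ins, not Nova's compute manager.

from collections import defaultdict
import threading

_cache_locks = defaultdict(threading.Lock)   # per-instance "refresh_cache" locks
_nw_info_cache = {}                          # instance_uuid -> list of VIF dicts

def handle_external_event(event_name, instance_uuid, refresh_port_info):
    """Handle a 'network-changed-<port_id>' style event by refreshing the
    cached VIF entry for that port, as in the log above.

    refresh_port_info(port_id) is a hypothetical callable returning the
    fresh VIF dict (the real code asks Neutron for the port details).
    """
    kind, _, port_id = event_name.partition("-changed-")
    if kind != "network" or not port_id:
        return  # not a network-changed event; other event types handled elsewhere
    with _cache_locks[instance_uuid]:
        fresh = refresh_port_info(port_id)
        vifs = _nw_info_cache.setdefault(instance_uuid, [])
        for i, vif in enumerate(vifs):
            if vif.get("id") == port_id:
                vifs[i] = fresh   # "Updated VIF entry in instance network info cache"
                break
        else:
            vifs.append(fresh)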
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.131793] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1180.131997] env[69328]: DEBUG nova.compute.manager [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1180.132757] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab64872e-142d-468b-8356-45bd13039f25 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.158345] env[69328]: DEBUG oslo_vmware.api [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274243, 'name': PowerOffVM_Task, 'duration_secs': 0.222937} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.158588] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1180.159353] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-003303fb-2df1-4de1-b20d-b66cfd862cbc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.178394] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cf67309-c75e-416c-bbe7-5153d4911a66 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.241816] env[69328]: DEBUG oslo_concurrency.lockutils [req-f0e9f15c-53cd-45cd-8082-0bbcc2efd013 req-40c21735-c568-4150-8897-620bb2f0eaa2 service nova] Releasing lock "refresh_cache-0cf68559-5f07-4006-9f7f-59027e31635d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1180.386411] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9966fe39-a7d5-4e09-95d5-beefe8e318f8 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "33583ef3-252c-45d4-a514-5646f98c5f45" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1180.386411] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9966fe39-a7d5-4e09-95d5-beefe8e318f8 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquired lock "33583ef3-252c-45d4-a514-5646f98c5f45" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1180.387130] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-8a70147a-4b95-4974-8b48-214a75ea09c1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.407186] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcb4335d-c59c-46ae-9e05-b1169ef6236e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.428381] env[69328]: DEBUG oslo_concurrency.lockutils [None req-950fb797-d42a-4da3-923d-c9b6a0f13ea5 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.362s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1180.436634] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9966fe39-a7d5-4e09-95d5-beefe8e318f8 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Reconfiguring VM to detach interface {{(pid=69328) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1180.438025] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.456s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1180.438697] env[69328]: INFO nova.compute.claims [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1180.441263] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-99fe15ff-63f9-43cf-bec5-15debe6f9fb2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.462032] env[69328]: DEBUG oslo_vmware.api [None req-9966fe39-a7d5-4e09-95d5-beefe8e318f8 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 1180.462032] env[69328]: value = "task-3274244" [ 1180.462032] env[69328]: _type = "Task" [ 1180.462032] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.475128] env[69328]: DEBUG oslo_vmware.api [None req-9966fe39-a7d5-4e09-95d5-beefe8e318f8 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274244, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.650030] env[69328]: DEBUG oslo_concurrency.lockutils [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1180.689151] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Creating Snapshot of the VM instance {{(pid=69328) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1180.689484] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-9b408700-665d-4398-8c6e-91e126b70086 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.697964] env[69328]: DEBUG oslo_vmware.api [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 1180.697964] env[69328]: value = "task-3274245" [ 1180.697964] env[69328]: _type = "Task" [ 1180.697964] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.706819] env[69328]: DEBUG oslo_vmware.api [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274245, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.974953] env[69328]: DEBUG oslo_vmware.api [None req-9966fe39-a7d5-4e09-95d5-beefe8e318f8 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274244, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.002374] env[69328]: INFO nova.scheduler.client.report [None req-950fb797-d42a-4da3-923d-c9b6a0f13ea5 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Deleted allocation for migration c892a2f2-f997-40ad-a8ba-b86ecfe7a6fb [ 1181.209715] env[69328]: DEBUG oslo_vmware.api [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274245, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.225634] env[69328]: DEBUG oslo_concurrency.lockutils [None req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Acquiring lock "53eb70f0-1734-4386-b747-014561ba577b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1181.225634] env[69328]: DEBUG oslo_concurrency.lockutils [None req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Lock "53eb70f0-1734-4386-b747-014561ba577b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1181.225634] env[69328]: DEBUG oslo_concurrency.lockutils [None req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Acquiring lock "53eb70f0-1734-4386-b747-014561ba577b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1181.225634] env[69328]: DEBUG oslo_concurrency.lockutils [None req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Lock "53eb70f0-1734-4386-b747-014561ba577b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1181.225634] env[69328]: DEBUG oslo_concurrency.lockutils [None req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Lock "53eb70f0-1734-4386-b747-014561ba577b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1181.228396] env[69328]: INFO nova.compute.manager [None req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Terminating instance [ 1181.472782] env[69328]: DEBUG oslo_vmware.api [None req-9966fe39-a7d5-4e09-95d5-beefe8e318f8 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274244, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.510376] env[69328]: DEBUG oslo_concurrency.lockutils [None req-950fb797-d42a-4da3-923d-c9b6a0f13ea5 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "f1be93b2-08db-41fe-87c4-f4e5f964cfa4" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 11.068s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1181.637370] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b87d74c0-083b-4ab4-88b0-dff9687f6c47 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.645529] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c12eabe-4a92-461a-878b-995056e0cdc7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.675480] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11477fa3-f1cb-44b1-bff8-df65a4ec545b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.682885] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f6b06e2-117b-407c-8c16-a73efec2bfa8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.696046] env[69328]: DEBUG nova.compute.provider_tree [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1181.709399] env[69328]: DEBUG oslo_vmware.api [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274245, 'name': CreateSnapshot_Task, 'duration_secs': 0.552864} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.710283] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Created Snapshot of the VM instance {{(pid=69328) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1181.711157] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e40fd61a-489c-4517-ad02-a69618365873 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.732037] env[69328]: DEBUG oslo_concurrency.lockutils [None req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Acquiring lock "refresh_cache-53eb70f0-1734-4386-b747-014561ba577b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1181.732238] env[69328]: DEBUG oslo_concurrency.lockutils [None req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Acquired lock "refresh_cache-53eb70f0-1734-4386-b747-014561ba577b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1181.732410] env[69328]: DEBUG nova.network.neutron [None req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1181.865226] env[69328]: DEBUG oslo_concurrency.lockutils [None req-833da309-8944-4b8e-8888-5fc9af458f0b tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "f1be93b2-08db-41fe-87c4-f4e5f964cfa4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1181.865515] env[69328]: DEBUG oslo_concurrency.lockutils [None req-833da309-8944-4b8e-8888-5fc9af458f0b tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "f1be93b2-08db-41fe-87c4-f4e5f964cfa4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1181.865714] env[69328]: DEBUG oslo_concurrency.lockutils [None req-833da309-8944-4b8e-8888-5fc9af458f0b tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "f1be93b2-08db-41fe-87c4-f4e5f964cfa4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1181.865897] env[69328]: DEBUG oslo_concurrency.lockutils [None req-833da309-8944-4b8e-8888-5fc9af458f0b tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "f1be93b2-08db-41fe-87c4-f4e5f964cfa4-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1181.866083] env[69328]: DEBUG oslo_concurrency.lockutils [None req-833da309-8944-4b8e-8888-5fc9af458f0b tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "f1be93b2-08db-41fe-87c4-f4e5f964cfa4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1181.868231] env[69328]: INFO nova.compute.manager [None req-833da309-8944-4b8e-8888-5fc9af458f0b tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Terminating instance [ 1181.974199] env[69328]: DEBUG oslo_vmware.api [None req-9966fe39-a7d5-4e09-95d5-beefe8e318f8 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274244, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.199213] env[69328]: DEBUG nova.scheduler.client.report [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1182.229381] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Creating linked-clone VM from snapshot {{(pid=69328) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1182.229883] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-c9792822-f490-4243-b621-a307572b2ecb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.242037] env[69328]: DEBUG oslo_vmware.api [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 1182.242037] env[69328]: value = "task-3274246" [ 1182.242037] env[69328]: _type = "Task" [ 1182.242037] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.252630] env[69328]: DEBUG oslo_vmware.api [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274246, 'name': CloneVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.253679] env[69328]: DEBUG nova.network.neutron [None req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1182.311297] env[69328]: DEBUG nova.network.neutron [None req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1182.371723] env[69328]: DEBUG nova.compute.manager [None req-833da309-8944-4b8e-8888-5fc9af458f0b tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1182.371723] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-833da309-8944-4b8e-8888-5fc9af458f0b tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1182.372891] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95080e7c-4c1f-4e29-be9c-376d1e3f9e5e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.381981] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-833da309-8944-4b8e-8888-5fc9af458f0b tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1182.382275] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-69bca756-8715-4280-886c-63d1ce9477fa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.390417] env[69328]: DEBUG oslo_vmware.api [None req-833da309-8944-4b8e-8888-5fc9af458f0b tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1182.390417] env[69328]: value = "task-3274247" [ 1182.390417] env[69328]: _type = "Task" [ 1182.390417] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.401685] env[69328]: DEBUG oslo_vmware.api [None req-833da309-8944-4b8e-8888-5fc9af458f0b tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274247, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.475613] env[69328]: DEBUG oslo_vmware.api [None req-9966fe39-a7d5-4e09-95d5-beefe8e318f8 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274244, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.704716] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.267s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1182.705378] env[69328]: DEBUG nova.compute.manager [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1182.708408] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.646s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1182.709248] env[69328]: DEBUG nova.objects.instance [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lazy-loading 'resources' on Instance uuid 1413dcfe-3570-4657-b811-81a1acc159d1 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1182.753748] env[69328]: DEBUG oslo_vmware.api [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274246, 'name': CloneVM_Task} progress is 94%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.814216] env[69328]: DEBUG oslo_concurrency.lockutils [None req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Releasing lock "refresh_cache-53eb70f0-1734-4386-b747-014561ba577b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1182.814814] env[69328]: DEBUG nova.compute.manager [None req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1182.815090] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1182.816184] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fae8aeb2-24e9-4a02-83b8-b3bf5912fea1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.825634] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1182.825944] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ae5df996-ffda-49b6-82c4-ae958b18aeb5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.834986] env[69328]: DEBUG oslo_vmware.api [None req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Waiting for the task: (returnval){ [ 1182.834986] env[69328]: value = "task-3274248" [ 1182.834986] env[69328]: _type = "Task" [ 1182.834986] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.845150] env[69328]: DEBUG oslo_vmware.api [None req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274248, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.901626] env[69328]: DEBUG oslo_vmware.api [None req-833da309-8944-4b8e-8888-5fc9af458f0b tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274247, 'name': PowerOffVM_Task, 'duration_secs': 0.247112} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.902017] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-833da309-8944-4b8e-8888-5fc9af458f0b tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1182.902265] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-833da309-8944-4b8e-8888-5fc9af458f0b tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1182.902497] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-878cef8c-ebbc-451d-9ada-5c32512600be {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.977108] env[69328]: DEBUG oslo_vmware.api [None req-9966fe39-a7d5-4e09-95d5-beefe8e318f8 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274244, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.984995] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-833da309-8944-4b8e-8888-5fc9af458f0b tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1182.985281] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-833da309-8944-4b8e-8888-5fc9af458f0b tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1182.985488] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-833da309-8944-4b8e-8888-5fc9af458f0b tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Deleting the datastore file [datastore2] f1be93b2-08db-41fe-87c4-f4e5f964cfa4 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1182.985792] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-16e601ec-9348-41a5-b3e2-532418b82f28 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.993911] env[69328]: DEBUG oslo_vmware.api [None req-833da309-8944-4b8e-8888-5fc9af458f0b tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1182.993911] env[69328]: value = "task-3274250" [ 1182.993911] env[69328]: _type = "Task" [ 1182.993911] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.004907] env[69328]: DEBUG oslo_vmware.api [None req-833da309-8944-4b8e-8888-5fc9af458f0b tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274250, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.017768] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e81b1919-256d-44b9-905b-12a016edcea4 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "ff815ffb-3422-469e-9b54-b33502826513" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1183.018063] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e81b1919-256d-44b9-905b-12a016edcea4 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "ff815ffb-3422-469e-9b54-b33502826513" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1183.211900] env[69328]: DEBUG nova.compute.utils [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1183.216986] env[69328]: DEBUG nova.compute.manager [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1183.216986] env[69328]: DEBUG nova.network.neutron [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1183.256289] env[69328]: DEBUG oslo_vmware.api [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274246, 'name': CloneVM_Task} progress is 95%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.261901] env[69328]: DEBUG nova.policy [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2be0cfdc98ee4199a8df31f70faa4b49', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '278be2f8452946b9ab9c4bce8f9a7557', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 1183.347758] env[69328]: DEBUG oslo_vmware.api [None req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274248, 'name': PowerOffVM_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.421922] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-050b376f-60a9-4cbd-9add-188f4b01c25f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.429924] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-289ad595-d674-49ba-92c9-5d4adca5b4c2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.462933] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0b24245-565f-4934-a9c6-6882846f9701 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.472210] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ed6091b-ed6a-429e-b4a2-4a4b85ac786e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.479092] env[69328]: DEBUG oslo_vmware.api [None req-9966fe39-a7d5-4e09-95d5-beefe8e318f8 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274244, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.491201] env[69328]: DEBUG nova.compute.provider_tree [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1183.504242] env[69328]: DEBUG oslo_vmware.api [None req-833da309-8944-4b8e-8888-5fc9af458f0b tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274250, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156402} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.505266] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-833da309-8944-4b8e-8888-5fc9af458f0b tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1183.505461] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-833da309-8944-4b8e-8888-5fc9af458f0b tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1183.505639] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-833da309-8944-4b8e-8888-5fc9af458f0b tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1183.505815] env[69328]: INFO nova.compute.manager [None req-833da309-8944-4b8e-8888-5fc9af458f0b tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1183.506067] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-833da309-8944-4b8e-8888-5fc9af458f0b tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1183.506272] env[69328]: DEBUG nova.compute.manager [-] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1183.506442] env[69328]: DEBUG nova.network.neutron [-] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1183.521549] env[69328]: DEBUG nova.compute.utils [None req-e81b1919-256d-44b9-905b-12a016edcea4 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1183.619816] env[69328]: DEBUG nova.network.neutron [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Successfully created port: 69e73394-845a-4108-8b2f-6b23a000d98c {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1183.722211] env[69328]: DEBUG nova.compute.manager [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1183.756298] env[69328]: DEBUG oslo_vmware.api [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274246, 'name': CloneVM_Task, 'duration_secs': 1.174433} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.756298] env[69328]: INFO nova.virt.vmwareapi.vmops [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Created linked-clone VM from snapshot [ 1183.758052] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa328c95-453e-4baa-881f-835539d07cfe {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.767617] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Uploading image 025a677f-beea-4695-85ea-28c156879ab9 {{(pid=69328) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1183.795649] env[69328]: DEBUG oslo_vmware.rw_handles [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1183.795649] env[69328]: value = "vm-653966" [ 1183.795649] env[69328]: _type = "VirtualMachine" [ 1183.795649] env[69328]: }. 
{{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1183.796420] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-13e3518d-6225-40a8-9836-7af3ba5cb327 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.805707] env[69328]: DEBUG oslo_vmware.rw_handles [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lease: (returnval){ [ 1183.805707] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52de0595-538b-8520-fed8-04c98c2d36b9" [ 1183.805707] env[69328]: _type = "HttpNfcLease" [ 1183.805707] env[69328]: } obtained for exporting VM: (result){ [ 1183.805707] env[69328]: value = "vm-653966" [ 1183.805707] env[69328]: _type = "VirtualMachine" [ 1183.805707] env[69328]: }. {{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1183.806527] env[69328]: DEBUG oslo_vmware.api [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the lease: (returnval){ [ 1183.806527] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52de0595-538b-8520-fed8-04c98c2d36b9" [ 1183.806527] env[69328]: _type = "HttpNfcLease" [ 1183.806527] env[69328]: } to be ready. {{(pid=69328) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1183.814392] env[69328]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1183.814392] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52de0595-538b-8520-fed8-04c98c2d36b9" [ 1183.814392] env[69328]: _type = "HttpNfcLease" [ 1183.814392] env[69328]: } is initializing. {{(pid=69328) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1183.847656] env[69328]: DEBUG oslo_vmware.api [None req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274248, 'name': PowerOffVM_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.976907] env[69328]: DEBUG oslo_vmware.api [None req-9966fe39-a7d5-4e09-95d5-beefe8e318f8 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274244, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.018920] env[69328]: ERROR nova.scheduler.client.report [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [req-bcc974c7-e97c-45d0-89b7-913200e5a35c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-bcc974c7-e97c-45d0-89b7-913200e5a35c"}]} [ 1184.025077] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e81b1919-256d-44b9-905b-12a016edcea4 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "ff815ffb-3422-469e-9b54-b33502826513" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1184.040961] env[69328]: DEBUG nova.scheduler.client.report [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Refreshing inventories for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1184.063918] env[69328]: DEBUG nova.scheduler.client.report [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Updating ProviderTree inventory for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1184.063918] env[69328]: DEBUG nova.compute.provider_tree [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 115, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1184.080834] env[69328]: DEBUG nova.scheduler.client.report [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Refreshing aggregate associations for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e, aggregates: None {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1184.105178] env[69328]: DEBUG nova.scheduler.client.report [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Refreshing trait associations for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO 
{{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1184.185486] env[69328]: DEBUG nova.compute.manager [req-810cfbc2-0cdd-4c5a-96a1-9763a9805201 req-dc6c19bf-4df2-4002-9c3d-be1ed6acaf67 service nova] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Received event network-vif-deleted-1018560a-13d7-4d01-8fc4-03d0b9beab90 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1184.185486] env[69328]: INFO nova.compute.manager [req-810cfbc2-0cdd-4c5a-96a1-9763a9805201 req-dc6c19bf-4df2-4002-9c3d-be1ed6acaf67 service nova] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Neutron deleted interface 1018560a-13d7-4d01-8fc4-03d0b9beab90; detaching it from the instance and deleting it from the info cache [ 1184.185486] env[69328]: DEBUG nova.network.neutron [req-810cfbc2-0cdd-4c5a-96a1-9763a9805201 req-dc6c19bf-4df2-4002-9c3d-be1ed6acaf67 service nova] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1184.312140] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f1ce5bd-cfe5-4248-8b4e-d2e80071cdb1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.320397] env[69328]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1184.320397] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52de0595-538b-8520-fed8-04c98c2d36b9" [ 1184.320397] env[69328]: _type = "HttpNfcLease" [ 1184.320397] env[69328]: } is ready. {{(pid=69328) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1184.320747] env[69328]: DEBUG oslo_vmware.rw_handles [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1184.320747] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52de0595-538b-8520-fed8-04c98c2d36b9" [ 1184.320747] env[69328]: _type = "HttpNfcLease" [ 1184.320747] env[69328]: }. {{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1184.321682] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d790e3ea-cdc3-487d-8eb3-270726b6d4eb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.325111] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b3fdf10-bd74-4089-a394-876e50466e42 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.333247] env[69328]: DEBUG oslo_vmware.rw_handles [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52916fc6-21d4-94e3-2b08-cf155555566d/disk-0.vmdk from lease info. 
{{(pid=69328) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1184.333429] env[69328]: DEBUG oslo_vmware.rw_handles [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52916fc6-21d4-94e3-2b08-cf155555566d/disk-0.vmdk for reading. {{(pid=69328) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1184.363872] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4820d454-bf08-4ef9-a5e2-dab30b6fb601 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.428365] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-896ba5b0-6413-4171-ae41-a9181cd9a9f4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.432627] env[69328]: DEBUG oslo_vmware.api [None req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274248, 'name': PowerOffVM_Task, 'duration_secs': 1.169504} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.433188] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1184.433369] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1184.433894] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-53ce4a7c-5667-45cf-9e08-362b90a7f47d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.443267] env[69328]: DEBUG nova.compute.provider_tree [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1184.464436] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-2709f135-546c-4bba-bcc3-7761498240b5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.466370] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None 
req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1184.466585] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1184.466762] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Deleting the datastore file [datastore2] 53eb70f0-1734-4386-b747-014561ba577b {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1184.467144] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e6e35484-adfc-4e71-b6bc-9aad896eb659 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.477637] env[69328]: DEBUG oslo_vmware.api [None req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Waiting for the task: (returnval){ [ 1184.477637] env[69328]: value = "task-3274253" [ 1184.477637] env[69328]: _type = "Task" [ 1184.477637] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.482128] env[69328]: DEBUG oslo_vmware.api [None req-9966fe39-a7d5-4e09-95d5-beefe8e318f8 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274244, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.491939] env[69328]: DEBUG oslo_vmware.api [None req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274253, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.648934] env[69328]: DEBUG nova.network.neutron [-] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1184.687728] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b5568ec7-b96e-4545-a6f4-7f4c3e28fcba {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.701295] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-308f3b4e-3d2c-47b4-b321-4f8b586bb553 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.741024] env[69328]: DEBUG nova.compute.manager [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1184.744372] env[69328]: DEBUG nova.compute.manager [req-810cfbc2-0cdd-4c5a-96a1-9763a9805201 req-dc6c19bf-4df2-4002-9c3d-be1ed6acaf67 service nova] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Detach interface failed, port_id=1018560a-13d7-4d01-8fc4-03d0b9beab90, reason: Instance f1be93b2-08db-41fe-87c4-f4e5f964cfa4 could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1184.772044] env[69328]: DEBUG nova.virt.hardware [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1184.772438] env[69328]: DEBUG nova.virt.hardware [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1184.772686] env[69328]: DEBUG nova.virt.hardware [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1184.773075] env[69328]: DEBUG nova.virt.hardware [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1184.773739] env[69328]: DEBUG nova.virt.hardware [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1184.773739] env[69328]: DEBUG nova.virt.hardware [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1184.773739] env[69328]: DEBUG nova.virt.hardware [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 1184.773937] env[69328]: DEBUG nova.virt.hardware [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1184.774067] env[69328]: DEBUG nova.virt.hardware [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1184.774358] env[69328]: DEBUG nova.virt.hardware [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1184.774550] env[69328]: DEBUG nova.virt.hardware [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1184.776332] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abcbb732-e58d-479c-bead-effa9c36a968 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.785978] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30806dc9-8838-4be7-a546-ff558c3ddd51 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.978614] env[69328]: DEBUG oslo_vmware.api [None req-9966fe39-a7d5-4e09-95d5-beefe8e318f8 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274244, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.979725] env[69328]: DEBUG nova.scheduler.client.report [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Updated inventory for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with generation 165 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1184.980000] env[69328]: DEBUG nova.compute.provider_tree [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Updating resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e generation from 165 to 166 during operation: update_inventory {{(pid=69328) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1184.980232] env[69328]: DEBUG nova.compute.provider_tree [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1184.992476] env[69328]: DEBUG oslo_vmware.api [None req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274253, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.099143} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.992881] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1184.993600] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1184.993600] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1184.993819] env[69328]: INFO nova.compute.manager [None req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Took 2.18 seconds to destroy the instance on the hypervisor. [ 1184.993956] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1184.995267] env[69328]: DEBUG nova.compute.manager [-] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1184.995267] env[69328]: DEBUG nova.network.neutron [-] [instance: 53eb70f0-1734-4386-b747-014561ba577b] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1185.092375] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e81b1919-256d-44b9-905b-12a016edcea4 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "ff815ffb-3422-469e-9b54-b33502826513" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1185.092964] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e81b1919-256d-44b9-905b-12a016edcea4 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "ff815ffb-3422-469e-9b54-b33502826513" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1185.093309] env[69328]: INFO nova.compute.manager [None req-e81b1919-256d-44b9-905b-12a016edcea4 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Attaching volume 7dec5135-3f2a-46ec-9d0d-3ae432471688 to /dev/sdb [ 1185.104872] 
env[69328]: DEBUG nova.network.neutron [-] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1185.136488] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dbf9e29-3f92-4ba8-97b7-2818c6aae612 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.146774] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaa35091-6509-469c-8f43-845e685f62ee {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.153866] env[69328]: INFO nova.compute.manager [-] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Took 1.65 seconds to deallocate network for instance. [ 1185.166396] env[69328]: DEBUG nova.virt.block_device [None req-e81b1919-256d-44b9-905b-12a016edcea4 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Updating existing volume attachment record: f5b7f6a6-d062-4774-aef8-b22d11b6b836 {{(pid=69328) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1185.375632] env[69328]: DEBUG nova.network.neutron [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Successfully updated port: 69e73394-845a-4108-8b2f-6b23a000d98c {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1185.480839] env[69328]: DEBUG oslo_vmware.api [None req-9966fe39-a7d5-4e09-95d5-beefe8e318f8 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274244, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.487967] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.779s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1185.490521] env[69328]: DEBUG oslo_concurrency.lockutils [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 4.841s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1185.490710] env[69328]: DEBUG nova.objects.instance [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69328) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1185.519083] env[69328]: INFO nova.scheduler.client.report [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Deleted allocations for instance 1413dcfe-3570-4657-b811-81a1acc159d1 [ 1185.612391] env[69328]: DEBUG nova.network.neutron [-] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1185.661219] env[69328]: DEBUG oslo_concurrency.lockutils [None req-833da309-8944-4b8e-8888-5fc9af458f0b tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1185.893192] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Acquiring lock "refresh_cache-d19f6a2a-3a16-4031-8c20-143ccfd6f5f5" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1185.893471] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Acquired lock "refresh_cache-d19f6a2a-3a16-4031-8c20-143ccfd6f5f5" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1185.893659] env[69328]: DEBUG nova.network.neutron [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1185.980242] env[69328]: DEBUG oslo_vmware.api [None req-9966fe39-a7d5-4e09-95d5-beefe8e318f8 
tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274244, 'name': ReconfigVM_Task} progress is 18%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.007034] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "ee3609ea-0855-47c2-874c-349c80419781" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1186.007341] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "ee3609ea-0855-47c2-874c-349c80419781" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1186.007714] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "ee3609ea-0855-47c2-874c-349c80419781-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1186.007920] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "ee3609ea-0855-47c2-874c-349c80419781-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1186.008190] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "ee3609ea-0855-47c2-874c-349c80419781-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1186.010199] env[69328]: INFO nova.compute.manager [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Terminating instance [ 1186.030131] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7cf9fea3-4624-4630-9dea-8151efd937d0 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lock "1413dcfe-3570-4657-b811-81a1acc159d1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.828s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1186.115086] env[69328]: INFO nova.compute.manager [-] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Took 1.12 seconds to deallocate network for instance. 
[ 1186.210672] env[69328]: DEBUG nova.compute.manager [req-d9000f13-00fd-4539-842f-39f2f851c6f3 req-33486a5e-6e81-4abf-a9d1-5e57d1737b1c service nova] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Received event network-vif-plugged-69e73394-845a-4108-8b2f-6b23a000d98c {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1186.210964] env[69328]: DEBUG oslo_concurrency.lockutils [req-d9000f13-00fd-4539-842f-39f2f851c6f3 req-33486a5e-6e81-4abf-a9d1-5e57d1737b1c service nova] Acquiring lock "d19f6a2a-3a16-4031-8c20-143ccfd6f5f5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1186.211209] env[69328]: DEBUG oslo_concurrency.lockutils [req-d9000f13-00fd-4539-842f-39f2f851c6f3 req-33486a5e-6e81-4abf-a9d1-5e57d1737b1c service nova] Lock "d19f6a2a-3a16-4031-8c20-143ccfd6f5f5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1186.211454] env[69328]: DEBUG oslo_concurrency.lockutils [req-d9000f13-00fd-4539-842f-39f2f851c6f3 req-33486a5e-6e81-4abf-a9d1-5e57d1737b1c service nova] Lock "d19f6a2a-3a16-4031-8c20-143ccfd6f5f5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1186.211717] env[69328]: DEBUG nova.compute.manager [req-d9000f13-00fd-4539-842f-39f2f851c6f3 req-33486a5e-6e81-4abf-a9d1-5e57d1737b1c service nova] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] No waiting events found dispatching network-vif-plugged-69e73394-845a-4108-8b2f-6b23a000d98c {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1186.212042] env[69328]: WARNING nova.compute.manager [req-d9000f13-00fd-4539-842f-39f2f851c6f3 req-33486a5e-6e81-4abf-a9d1-5e57d1737b1c service nova] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Received unexpected event network-vif-plugged-69e73394-845a-4108-8b2f-6b23a000d98c for instance with vm_state building and task_state spawning. [ 1186.212249] env[69328]: DEBUG nova.compute.manager [req-d9000f13-00fd-4539-842f-39f2f851c6f3 req-33486a5e-6e81-4abf-a9d1-5e57d1737b1c service nova] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Received event network-changed-69e73394-845a-4108-8b2f-6b23a000d98c {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1186.212437] env[69328]: DEBUG nova.compute.manager [req-d9000f13-00fd-4539-842f-39f2f851c6f3 req-33486a5e-6e81-4abf-a9d1-5e57d1737b1c service nova] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Refreshing instance network info cache due to event network-changed-69e73394-845a-4108-8b2f-6b23a000d98c. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1186.212557] env[69328]: DEBUG oslo_concurrency.lockutils [req-d9000f13-00fd-4539-842f-39f2f851c6f3 req-33486a5e-6e81-4abf-a9d1-5e57d1737b1c service nova] Acquiring lock "refresh_cache-d19f6a2a-3a16-4031-8c20-143ccfd6f5f5" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1186.308370] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c4ad89fe-709f-4021-866f-c80eb03ee6b4 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquiring lock "c751ef77-c3be-46cd-b7eb-fe139bf0998b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1186.308640] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c4ad89fe-709f-4021-866f-c80eb03ee6b4 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lock "c751ef77-c3be-46cd-b7eb-fe139bf0998b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1186.308847] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c4ad89fe-709f-4021-866f-c80eb03ee6b4 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquiring lock "c751ef77-c3be-46cd-b7eb-fe139bf0998b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1186.309050] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c4ad89fe-709f-4021-866f-c80eb03ee6b4 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lock "c751ef77-c3be-46cd-b7eb-fe139bf0998b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1186.309228] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c4ad89fe-709f-4021-866f-c80eb03ee6b4 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lock "c751ef77-c3be-46cd-b7eb-fe139bf0998b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1186.311915] env[69328]: INFO nova.compute.manager [None req-c4ad89fe-709f-4021-866f-c80eb03ee6b4 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Terminating instance [ 1186.444254] env[69328]: DEBUG nova.network.neutron [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1186.482279] env[69328]: DEBUG oslo_vmware.api [None req-9966fe39-a7d5-4e09-95d5-beefe8e318f8 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274244, 'name': ReconfigVM_Task, 'duration_secs': 5.773052} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.484787] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9966fe39-a7d5-4e09-95d5-beefe8e318f8 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Releasing lock "33583ef3-252c-45d4-a514-5646f98c5f45" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1186.485162] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-9966fe39-a7d5-4e09-95d5-beefe8e318f8 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Reconfigured VM to detach interface {{(pid=69328) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1186.500610] env[69328]: DEBUG oslo_concurrency.lockutils [None req-69667ff9-94e9-4619-b825-fb34194a9380 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1186.501866] env[69328]: DEBUG oslo_concurrency.lockutils [None req-833da309-8944-4b8e-8888-5fc9af458f0b tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.841s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1186.502160] env[69328]: DEBUG oslo_concurrency.lockutils [None req-833da309-8944-4b8e-8888-5fc9af458f0b tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1186.514358] env[69328]: DEBUG nova.compute.manager [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1186.514358] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1186.517849] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5b033f53-5620-4946-8b4b-eb4812cce0b7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.523824] env[69328]: DEBUG oslo_vmware.api [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 1186.523824] env[69328]: value = "task-3274255" [ 1186.523824] env[69328]: _type = "Task" [ 1186.523824] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.536987] env[69328]: DEBUG oslo_vmware.api [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274255, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.538063] env[69328]: INFO nova.scheduler.client.report [None req-833da309-8944-4b8e-8888-5fc9af458f0b tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Deleted allocations for instance f1be93b2-08db-41fe-87c4-f4e5f964cfa4 [ 1186.622175] env[69328]: DEBUG oslo_concurrency.lockutils [None req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1186.622453] env[69328]: DEBUG oslo_concurrency.lockutils [None req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1186.622695] env[69328]: DEBUG nova.objects.instance [None req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Lazy-loading 'resources' on Instance uuid 53eb70f0-1734-4386-b747-014561ba577b {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1186.673266] env[69328]: DEBUG nova.network.neutron [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Updating instance_info_cache with network_info: [{"id": "69e73394-845a-4108-8b2f-6b23a000d98c", "address": "fa:16:3e:e9:f3:15", "network": {"id": "6d64fb31-1957-4722-a4b3-46b946bfb65a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1232247602-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": 
{"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "278be2f8452946b9ab9c4bce8f9a7557", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5bd281ed-ae39-485f-90ee-4ee27994b5b0", "external-id": "nsx-vlan-transportzone-305", "segmentation_id": 305, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69e73394-84", "ovs_interfaceid": "69e73394-845a-4108-8b2f-6b23a000d98c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1186.816448] env[69328]: DEBUG nova.compute.manager [None req-c4ad89fe-709f-4021-866f-c80eb03ee6b4 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1186.816680] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c4ad89fe-709f-4021-866f-c80eb03ee6b4 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1186.817637] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b2fd749-7a30-4e71-bb32-5420ac1e3c6b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.827239] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4ad89fe-709f-4021-866f-c80eb03ee6b4 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1186.827524] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dfda6623-fb15-41f7-b57e-9952d29346b6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.836253] env[69328]: DEBUG oslo_vmware.api [None req-c4ad89fe-709f-4021-866f-c80eb03ee6b4 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 1186.836253] env[69328]: value = "task-3274256" [ 1186.836253] env[69328]: _type = "Task" [ 1186.836253] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.845350] env[69328]: DEBUG oslo_vmware.api [None req-c4ad89fe-709f-4021-866f-c80eb03ee6b4 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274256, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.035621] env[69328]: DEBUG oslo_vmware.api [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274255, 'name': PowerOffVM_Task, 'duration_secs': 0.273909} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.036019] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1187.036268] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Volume detach. Driver type: vmdk {{(pid=69328) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1187.036583] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653933', 'volume_id': 'a2de811d-614f-4456-ac21-52535c9e5fd6', 'name': 'volume-a2de811d-614f-4456-ac21-52535c9e5fd6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'ee3609ea-0855-47c2-874c-349c80419781', 'attached_at': '2025-04-03T17:45:05.000000', 'detached_at': '', 'volume_id': 'a2de811d-614f-4456-ac21-52535c9e5fd6', 'serial': 'a2de811d-614f-4456-ac21-52535c9e5fd6'} {{(pid=69328) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1187.037579] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fc3f0c8-bbe8-4093-8d36-336a89a10128 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.059258] env[69328]: DEBUG oslo_concurrency.lockutils [None req-833da309-8944-4b8e-8888-5fc9af458f0b tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "f1be93b2-08db-41fe-87c4-f4e5f964cfa4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.194s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1187.060937] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bb35628-4aec-4d59-afab-8fe476deba10 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.070132] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-906a48e7-f6d1-4646-81fe-b3fcaf5b3d21 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.090954] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-d50238c0-8e0f-4e72-9c1c-33d5c33bdabf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.109424] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] The volume has not been displaced from its original location: [datastore2] volume-a2de811d-614f-4456-ac21-52535c9e5fd6/volume-a2de811d-614f-4456-ac21-52535c9e5fd6.vmdk. No consolidation needed. {{(pid=69328) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1187.114229] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Reconfiguring VM instance instance-0000006e to detach disk 2000 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1187.114570] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5184ea45-ae53-477f-8248-e9c41a27bb7f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.136489] env[69328]: DEBUG oslo_vmware.api [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 1187.136489] env[69328]: value = "task-3274257" [ 1187.136489] env[69328]: _type = "Task" [ 1187.136489] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.146151] env[69328]: DEBUG oslo_vmware.api [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274257, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.178364] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Releasing lock "refresh_cache-d19f6a2a-3a16-4031-8c20-143ccfd6f5f5" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1187.178822] env[69328]: DEBUG nova.compute.manager [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Instance network_info: |[{"id": "69e73394-845a-4108-8b2f-6b23a000d98c", "address": "fa:16:3e:e9:f3:15", "network": {"id": "6d64fb31-1957-4722-a4b3-46b946bfb65a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1232247602-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "278be2f8452946b9ab9c4bce8f9a7557", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5bd281ed-ae39-485f-90ee-4ee27994b5b0", "external-id": "nsx-vlan-transportzone-305", "segmentation_id": 305, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69e73394-84", "ovs_interfaceid": "69e73394-845a-4108-8b2f-6b23a000d98c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1187.179966] env[69328]: DEBUG oslo_concurrency.lockutils [req-d9000f13-00fd-4539-842f-39f2f851c6f3 req-33486a5e-6e81-4abf-a9d1-5e57d1737b1c service nova] Acquired lock "refresh_cache-d19f6a2a-3a16-4031-8c20-143ccfd6f5f5" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1187.180298] env[69328]: DEBUG nova.network.neutron [req-d9000f13-00fd-4539-842f-39f2f851c6f3 req-33486a5e-6e81-4abf-a9d1-5e57d1737b1c service nova] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Refreshing network info cache for port 69e73394-845a-4108-8b2f-6b23a000d98c {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1187.181670] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e9:f3:15', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5bd281ed-ae39-485f-90ee-4ee27994b5b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '69e73394-845a-4108-8b2f-6b23a000d98c', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1187.191357] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] 
Creating folder: Project (278be2f8452946b9ab9c4bce8f9a7557). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1187.197695] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-57fa6e39-434f-4fd4-a374-e47f3814cd6b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.217540] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Created folder: Project (278be2f8452946b9ab9c4bce8f9a7557) in parent group-v653649. [ 1187.217856] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Creating folder: Instances. Parent ref: group-v653968. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1187.218047] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5931afc7-72f6-40e4-b2d5-6190b3c01d65 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.239264] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Created folder: Instances in parent group-v653968. [ 1187.239542] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1187.242381] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1187.243636] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-906302f6-98e7-48fe-a4de-8e323cb5ded0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.269642] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1187.269642] env[69328]: value = "task-3274260" [ 1187.269642] env[69328]: _type = "Task" [ 1187.269642] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.281669] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274260, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.333386] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-946c30d9-e163-4614-acc1-488995a091cd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.344081] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6f80063-634e-4248-bac5-7eb8d46bd98d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.351808] env[69328]: DEBUG oslo_vmware.api [None req-c4ad89fe-709f-4021-866f-c80eb03ee6b4 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274256, 'name': PowerOffVM_Task, 'duration_secs': 0.257003} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.355277] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4ad89fe-709f-4021-866f-c80eb03ee6b4 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1187.355738] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c4ad89fe-709f-4021-866f-c80eb03ee6b4 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1187.355796] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1c8ba95f-de60-4df7-9388-ded186c5de13 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.387068] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d7fca9c-2b33-4caf-aa81-ec446677c8a2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.395143] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5afb4256-2918-4e23-873c-863294660ff3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.412942] env[69328]: DEBUG nova.compute.provider_tree [None req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1187.454405] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c4ad89fe-709f-4021-866f-c80eb03ee6b4 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1187.454564] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c4ad89fe-709f-4021-866f-c80eb03ee6b4 tempest-ServerRescueNegativeTestJSON-2009991415 
tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1187.454712] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4ad89fe-709f-4021-866f-c80eb03ee6b4 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Deleting the datastore file [datastore1] c751ef77-c3be-46cd-b7eb-fe139bf0998b {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1187.455531] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5fb860b1-e66e-4ce0-adf0-0c9732a6b4f3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.462963] env[69328]: DEBUG oslo_vmware.api [None req-c4ad89fe-709f-4021-866f-c80eb03ee6b4 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for the task: (returnval){ [ 1187.462963] env[69328]: value = "task-3274263" [ 1187.462963] env[69328]: _type = "Task" [ 1187.462963] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.472124] env[69328]: DEBUG oslo_vmware.api [None req-c4ad89fe-709f-4021-866f-c80eb03ee6b4 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274263, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.488276] env[69328]: DEBUG nova.network.neutron [req-d9000f13-00fd-4539-842f-39f2f851c6f3 req-33486a5e-6e81-4abf-a9d1-5e57d1737b1c service nova] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Updated VIF entry in instance network info cache for port 69e73394-845a-4108-8b2f-6b23a000d98c. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1187.488663] env[69328]: DEBUG nova.network.neutron [req-d9000f13-00fd-4539-842f-39f2f851c6f3 req-33486a5e-6e81-4abf-a9d1-5e57d1737b1c service nova] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Updating instance_info_cache with network_info: [{"id": "69e73394-845a-4108-8b2f-6b23a000d98c", "address": "fa:16:3e:e9:f3:15", "network": {"id": "6d64fb31-1957-4722-a4b3-46b946bfb65a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1232247602-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "278be2f8452946b9ab9c4bce8f9a7557", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5bd281ed-ae39-485f-90ee-4ee27994b5b0", "external-id": "nsx-vlan-transportzone-305", "segmentation_id": 305, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69e73394-84", "ovs_interfaceid": "69e73394-845a-4108-8b2f-6b23a000d98c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1187.647428] env[69328]: DEBUG oslo_vmware.api [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274257, 'name': ReconfigVM_Task, 'duration_secs': 0.219778} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.647736] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Reconfigured VM instance instance-0000006e to detach disk 2000 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1187.652966] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ffdef90-fad8-4bd5-a9f2-a37aafc615f4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.670769] env[69328]: DEBUG oslo_vmware.api [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 1187.670769] env[69328]: value = "task-3274264" [ 1187.670769] env[69328]: _type = "Task" [ 1187.670769] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.679433] env[69328]: DEBUG oslo_vmware.api [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274264, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.779636] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274260, 'name': CreateVM_Task, 'duration_secs': 0.363988} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.779841] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1187.780515] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1187.780684] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1187.781021] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1187.781294] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00eae2d6-06c5-44c4-85e8-a6ab9ed29bd5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.786894] env[69328]: DEBUG oslo_vmware.api [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Waiting for the task: (returnval){ [ 1187.786894] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5243150e-f469-7b6c-336b-08df022833cb" [ 1187.786894] env[69328]: _type = "Task" [ 1187.786894] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.795550] env[69328]: DEBUG oslo_vmware.api [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5243150e-f469-7b6c-336b-08df022833cb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.884222] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9966fe39-a7d5-4e09-95d5-beefe8e318f8 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "refresh_cache-33583ef3-252c-45d4-a514-5646f98c5f45" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1187.884431] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9966fe39-a7d5-4e09-95d5-beefe8e318f8 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquired lock "refresh_cache-33583ef3-252c-45d4-a514-5646f98c5f45" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1187.884613] env[69328]: DEBUG nova.network.neutron [None req-9966fe39-a7d5-4e09-95d5-beefe8e318f8 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1187.916745] env[69328]: DEBUG nova.scheduler.client.report [None req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1187.974967] env[69328]: DEBUG oslo_vmware.api [None req-c4ad89fe-709f-4021-866f-c80eb03ee6b4 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Task: {'id': task-3274263, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.188291} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.975289] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4ad89fe-709f-4021-866f-c80eb03ee6b4 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1187.975483] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c4ad89fe-709f-4021-866f-c80eb03ee6b4 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1187.975685] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c4ad89fe-709f-4021-866f-c80eb03ee6b4 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1187.976049] env[69328]: INFO nova.compute.manager [None req-c4ad89fe-709f-4021-866f-c80eb03ee6b4 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1187.976113] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c4ad89fe-709f-4021-866f-c80eb03ee6b4 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1187.976331] env[69328]: DEBUG nova.compute.manager [-] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1187.976426] env[69328]: DEBUG nova.network.neutron [-] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1187.991274] env[69328]: DEBUG oslo_concurrency.lockutils [req-d9000f13-00fd-4539-842f-39f2f851c6f3 req-33486a5e-6e81-4abf-a9d1-5e57d1737b1c service nova] Releasing lock "refresh_cache-d19f6a2a-3a16-4031-8c20-143ccfd6f5f5" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1188.003421] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "566c3167-4cf2-4236-812f-dfbf30bbaf6f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1188.003713] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "566c3167-4cf2-4236-812f-dfbf30bbaf6f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1188.182486] env[69328]: DEBUG oslo_vmware.api [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274264, 'name': ReconfigVM_Task, 'duration_secs': 0.119165} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.182901] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653933', 'volume_id': 'a2de811d-614f-4456-ac21-52535c9e5fd6', 'name': 'volume-a2de811d-614f-4456-ac21-52535c9e5fd6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'ee3609ea-0855-47c2-874c-349c80419781', 'attached_at': '2025-04-03T17:45:05.000000', 'detached_at': '', 'volume_id': 'a2de811d-614f-4456-ac21-52535c9e5fd6', 'serial': 'a2de811d-614f-4456-ac21-52535c9e5fd6'} {{(pid=69328) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1188.183258] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1188.184155] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6944184b-b40e-4e70-ad3e-505aff97af13 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.192188] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1188.192460] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-05c87354-6727-4511-af9f-85a79f52f3f7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.260902] env[69328]: DEBUG nova.compute.manager [req-6275d562-0773-4ab5-9ee3-1b16c8ff28e1 req-33b4f514-0151-475e-b2ca-2cb352ba7465 service nova] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Received event network-vif-deleted-2a3862dd-bd04-40ed-9d66-1fa2418297ea {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1188.261235] env[69328]: INFO nova.compute.manager [req-6275d562-0773-4ab5-9ee3-1b16c8ff28e1 req-33b4f514-0151-475e-b2ca-2cb352ba7465 service nova] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Neutron deleted interface 2a3862dd-bd04-40ed-9d66-1fa2418297ea; detaching it from the instance and deleting it from the info cache [ 1188.261705] env[69328]: DEBUG nova.network.neutron [req-6275d562-0773-4ab5-9ee3-1b16c8ff28e1 req-33b4f514-0151-475e-b2ca-2cb352ba7465 service nova] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1188.274350] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Unregistered the 
VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1188.274596] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1188.274784] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Deleting the datastore file [datastore2] ee3609ea-0855-47c2-874c-349c80419781 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1188.275429] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b7f3d6ee-3c0a-48ba-b62f-82fdf07b610f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.285321] env[69328]: DEBUG oslo_vmware.api [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 1188.285321] env[69328]: value = "task-3274266" [ 1188.285321] env[69328]: _type = "Task" [ 1188.285321] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.297225] env[69328]: DEBUG oslo_vmware.api [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274266, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.300800] env[69328]: DEBUG oslo_vmware.api [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5243150e-f469-7b6c-336b-08df022833cb, 'name': SearchDatastore_Task, 'duration_secs': 0.010448} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.301098] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1188.301360] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1188.301605] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1188.301754] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1188.301947] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1188.302225] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-364e264b-b3b1-4a6e-b768-c95cc447072f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.311484] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1188.311688] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1188.312521] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f23e99d-deb1-411f-a7de-5de4521e6985 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.319541] env[69328]: DEBUG oslo_vmware.api [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Waiting for the task: (returnval){ [ 1188.319541] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52cca60f-2663-da08-7754-eaa83088b493" [ 1188.319541] env[69328]: _type = "Task" [ 1188.319541] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.327916] env[69328]: DEBUG oslo_vmware.api [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52cca60f-2663-da08-7754-eaa83088b493, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.371930] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cfa89dcb-c141-458f-aaed-81b1f0a1ca0d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "33583ef3-252c-45d4-a514-5646f98c5f45" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1188.372336] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cfa89dcb-c141-458f-aaed-81b1f0a1ca0d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "33583ef3-252c-45d4-a514-5646f98c5f45" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1188.372589] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cfa89dcb-c141-458f-aaed-81b1f0a1ca0d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "33583ef3-252c-45d4-a514-5646f98c5f45-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1188.372867] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cfa89dcb-c141-458f-aaed-81b1f0a1ca0d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "33583ef3-252c-45d4-a514-5646f98c5f45-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1188.373082] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cfa89dcb-c141-458f-aaed-81b1f0a1ca0d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "33583ef3-252c-45d4-a514-5646f98c5f45-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 
0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.375535] env[69328]: INFO nova.compute.manager [None req-cfa89dcb-c141-458f-aaed-81b1f0a1ca0d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Terminating instance [ 1188.422074] env[69328]: DEBUG oslo_concurrency.lockutils [None req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.799s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.447067] env[69328]: INFO nova.scheduler.client.report [None req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Deleted allocations for instance 53eb70f0-1734-4386-b747-014561ba577b [ 1188.506206] env[69328]: DEBUG nova.compute.manager [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1188.602294] env[69328]: INFO nova.network.neutron [None req-9966fe39-a7d5-4e09-95d5-beefe8e318f8 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Port 571eed05-9f96-46fe-9592-59e38c00196c from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1188.602674] env[69328]: DEBUG nova.network.neutron [None req-9966fe39-a7d5-4e09-95d5-beefe8e318f8 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Updating instance_info_cache with network_info: [{"id": "509b2377-84e7-48a6-b2ed-811f288cc65c", "address": "fa:16:3e:f8:6d:5c", "network": {"id": "aed15283-4a79-4e99-8b6c-49cf754138de", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1271042528-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30209bc93a4042488f15c73b7e4733d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "de7fa486-5f28-44ae-b0cf-72234ff87546", "external-id": "nsx-vlan-transportzone-229", "segmentation_id": 229, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap509b2377-84", "ovs_interfaceid": "509b2377-84e7-48a6-b2ed-811f288cc65c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1188.745437] env[69328]: DEBUG nova.network.neutron [-] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1188.764133] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ca9acb1e-f314-4474-adf9-f4404cf0db38 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.775936] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f5b4af2-e442-4ff2-8976-c41077140de0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.797567] env[69328]: DEBUG oslo_vmware.api [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274266, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.08981} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.797819] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1188.798012] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1188.798204] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1188.798377] env[69328]: INFO nova.compute.manager [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Took 2.28 seconds to destroy the instance on the hypervisor. [ 1188.798613] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1188.798805] env[69328]: DEBUG nova.compute.manager [-] [instance: ee3609ea-0855-47c2-874c-349c80419781] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1188.798897] env[69328]: DEBUG nova.network.neutron [-] [instance: ee3609ea-0855-47c2-874c-349c80419781] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1188.812092] env[69328]: DEBUG nova.compute.manager [req-6275d562-0773-4ab5-9ee3-1b16c8ff28e1 req-33b4f514-0151-475e-b2ca-2cb352ba7465 service nova] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Detach interface failed, port_id=2a3862dd-bd04-40ed-9d66-1fa2418297ea, reason: Instance c751ef77-c3be-46cd-b7eb-fe139bf0998b could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1188.830487] env[69328]: DEBUG oslo_vmware.api [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52cca60f-2663-da08-7754-eaa83088b493, 'name': SearchDatastore_Task, 'duration_secs': 0.010127} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.831331] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5461f42-aef5-4229-b0b3-7a5bcca30cd7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.837074] env[69328]: DEBUG oslo_vmware.api [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Waiting for the task: (returnval){ [ 1188.837074] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c794e4-46aa-45bb-58ce-c16e5393b509" [ 1188.837074] env[69328]: _type = "Task" [ 1188.837074] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.849689] env[69328]: DEBUG oslo_vmware.api [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c794e4-46aa-45bb-58ce-c16e5393b509, 'name': SearchDatastore_Task, 'duration_secs': 0.009963} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.849939] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1188.850205] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] d19f6a2a-3a16-4031-8c20-143ccfd6f5f5/d19f6a2a-3a16-4031-8c20-143ccfd6f5f5.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1188.850460] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f15bd0d9-10a5-4a4f-ba78-1f3138d1a045 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.857516] env[69328]: DEBUG oslo_vmware.api [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Waiting for the task: (returnval){ [ 1188.857516] env[69328]: value = "task-3274267" [ 1188.857516] env[69328]: _type = "Task" [ 1188.857516] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.865742] env[69328]: DEBUG oslo_vmware.api [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274267, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.879729] env[69328]: DEBUG nova.compute.manager [None req-cfa89dcb-c141-458f-aaed-81b1f0a1ca0d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1188.879970] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-cfa89dcb-c141-458f-aaed-81b1f0a1ca0d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1188.880930] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae769507-c28b-4673-a361-159590962bd8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.888909] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfa89dcb-c141-458f-aaed-81b1f0a1ca0d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1188.889254] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-871cd71c-43c7-4c11-bc91-cec08d85a117 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.896627] env[69328]: DEBUG oslo_vmware.api [None req-cfa89dcb-c141-458f-aaed-81b1f0a1ca0d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 1188.896627] env[69328]: value = "task-3274268" [ 1188.896627] env[69328]: _type = "Task" [ 1188.896627] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.906794] env[69328]: DEBUG oslo_vmware.api [None req-cfa89dcb-c141-458f-aaed-81b1f0a1ca0d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274268, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.958190] env[69328]: DEBUG oslo_concurrency.lockutils [None req-66532237-f594-450a-b6b1-6c22da384129 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Lock "53eb70f0-1734-4386-b747-014561ba577b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.733s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1189.036188] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1189.036603] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1189.038684] env[69328]: INFO nova.compute.claims [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1189.105807] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9966fe39-a7d5-4e09-95d5-beefe8e318f8 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Releasing lock "refresh_cache-33583ef3-252c-45d4-a514-5646f98c5f45" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1189.248452] env[69328]: INFO nova.compute.manager [-] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Took 1.27 seconds to deallocate network for instance. [ 1189.368286] env[69328]: DEBUG oslo_vmware.api [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274267, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.47255} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.368448] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] d19f6a2a-3a16-4031-8c20-143ccfd6f5f5/d19f6a2a-3a16-4031-8c20-143ccfd6f5f5.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1189.368667] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1189.368955] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d7c291b1-656a-4d73-a347-d827765ac4c7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.377593] env[69328]: DEBUG oslo_vmware.api [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Waiting for the task: (returnval){ [ 1189.377593] env[69328]: value = "task-3274269" [ 1189.377593] env[69328]: _type = "Task" [ 1189.377593] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.388125] env[69328]: DEBUG oslo_vmware.api [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274269, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.407508] env[69328]: DEBUG oslo_vmware.api [None req-cfa89dcb-c141-458f-aaed-81b1f0a1ca0d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274268, 'name': PowerOffVM_Task, 'duration_secs': 0.240473} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.407797] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfa89dcb-c141-458f-aaed-81b1f0a1ca0d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1189.408229] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-cfa89dcb-c141-458f-aaed-81b1f0a1ca0d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1189.408319] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0ae5464d-1f57-4a9e-b8b1-111b30fa9877 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.480701] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-cfa89dcb-c141-458f-aaed-81b1f0a1ca0d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1189.481052] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-cfa89dcb-c141-458f-aaed-81b1f0a1ca0d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1189.481275] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfa89dcb-c141-458f-aaed-81b1f0a1ca0d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Deleting the datastore file [datastore1] 33583ef3-252c-45d4-a514-5646f98c5f45 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1189.481884] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2ee0aa9f-7ead-4ff7-877e-c12cb7f9e40b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.489914] env[69328]: DEBUG oslo_vmware.api [None req-cfa89dcb-c141-458f-aaed-81b1f0a1ca0d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 1189.489914] env[69328]: value = "task-3274271" [ 1189.489914] env[69328]: _type = "Task" [ 1189.489914] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.500150] env[69328]: DEBUG oslo_vmware.api [None req-cfa89dcb-c141-458f-aaed-81b1f0a1ca0d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274271, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.611223] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9966fe39-a7d5-4e09-95d5-beefe8e318f8 tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "interface-33583ef3-252c-45d4-a514-5646f98c5f45-571eed05-9f96-46fe-9592-59e38c00196c" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.728s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1189.731486] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-e81b1919-256d-44b9-905b-12a016edcea4 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Volume attach. Driver type: vmdk {{(pid=69328) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1189.731760] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-e81b1919-256d-44b9-905b-12a016edcea4 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653967', 'volume_id': '7dec5135-3f2a-46ec-9d0d-3ae432471688', 'name': 'volume-7dec5135-3f2a-46ec-9d0d-3ae432471688', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ff815ffb-3422-469e-9b54-b33502826513', 'attached_at': '', 'detached_at': '', 'volume_id': '7dec5135-3f2a-46ec-9d0d-3ae432471688', 'serial': '7dec5135-3f2a-46ec-9d0d-3ae432471688'} {{(pid=69328) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1189.732749] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b9f58ee-97b9-46f8-a284-52995a546e9a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.757184] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c4ad89fe-709f-4021-866f-c80eb03ee6b4 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1189.758315] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cafbd6d1-ec5f-4ac1-b695-bf1840291dee {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.788295] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-e81b1919-256d-44b9-905b-12a016edcea4 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Reconfiguring VM instance instance-00000073 to attach disk [datastore2] volume-7dec5135-3f2a-46ec-9d0d-3ae432471688/volume-7dec5135-3f2a-46ec-9d0d-3ae432471688.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1189.788773] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-91d88591-6e0b-49bc-b284-10fff3d88f1f 
{{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.810883] env[69328]: DEBUG oslo_vmware.api [None req-e81b1919-256d-44b9-905b-12a016edcea4 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 1189.810883] env[69328]: value = "task-3274272" [ 1189.810883] env[69328]: _type = "Task" [ 1189.810883] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.820365] env[69328]: DEBUG oslo_vmware.api [None req-e81b1919-256d-44b9-905b-12a016edcea4 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274272, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.888131] env[69328]: DEBUG oslo_vmware.api [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274269, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08326} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.888450] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1189.889334] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ccd91a2-9023-4b3c-be52-c83088ac2741 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.912037] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Reconfiguring VM instance instance-00000078 to attach disk [datastore2] d19f6a2a-3a16-4031-8c20-143ccfd6f5f5/d19f6a2a-3a16-4031-8c20-143ccfd6f5f5.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1189.912361] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c2d42cbd-9782-4b17-871f-0e359f9871ab {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.933211] env[69328]: DEBUG oslo_vmware.api [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Waiting for the task: (returnval){ [ 1189.933211] env[69328]: value = "task-3274273" [ 1189.933211] env[69328]: _type = "Task" [ 1189.933211] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.942390] env[69328]: DEBUG oslo_vmware.api [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274273, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.976289] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f42c2022-4f43-4464-98eb-24bfe7b47191 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Acquiring lock "0c83f194-9346-4e24-a0ea-815d0b454ded" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1189.976701] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f42c2022-4f43-4464-98eb-24bfe7b47191 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Lock "0c83f194-9346-4e24-a0ea-815d0b454ded" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1189.977078] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f42c2022-4f43-4464-98eb-24bfe7b47191 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Acquiring lock "0c83f194-9346-4e24-a0ea-815d0b454ded-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1189.977396] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f42c2022-4f43-4464-98eb-24bfe7b47191 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Lock "0c83f194-9346-4e24-a0ea-815d0b454ded-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1189.977677] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f42c2022-4f43-4464-98eb-24bfe7b47191 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Lock "0c83f194-9346-4e24-a0ea-815d0b454ded-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1189.980667] env[69328]: INFO nova.compute.manager [None req-f42c2022-4f43-4464-98eb-24bfe7b47191 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 0c83f194-9346-4e24-a0ea-815d0b454ded] Terminating instance [ 1190.004717] env[69328]: DEBUG oslo_vmware.api [None req-cfa89dcb-c141-458f-aaed-81b1f0a1ca0d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274271, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.195158} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.005458] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfa89dcb-c141-458f-aaed-81b1f0a1ca0d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1190.005458] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-cfa89dcb-c141-458f-aaed-81b1f0a1ca0d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1190.005911] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-cfa89dcb-c141-458f-aaed-81b1f0a1ca0d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1190.006215] env[69328]: INFO nova.compute.manager [None req-cfa89dcb-c141-458f-aaed-81b1f0a1ca0d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1190.006586] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cfa89dcb-c141-458f-aaed-81b1f0a1ca0d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1190.006877] env[69328]: DEBUG nova.compute.manager [-] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1190.007041] env[69328]: DEBUG nova.network.neutron [-] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1190.100267] env[69328]: DEBUG nova.network.neutron [-] [instance: ee3609ea-0855-47c2-874c-349c80419781] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1190.237113] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-324339b8-6275-4644-b553-fccce7ac949a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.246821] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d6315c0-c71d-4df7-9613-32fa5b5b352b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.284272] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dda41a66-990f-49aa-a1d4-becf75ba2fe7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.296992] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-069d5b64-becf-4584-a929-6860f51f8b4a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.302154] env[69328]: DEBUG nova.compute.manager [req-18a6734b-3aa2-4b3f-8d10-b5cc2627284c req-02f542ca-fe88-4ff4-9ada-645e66c99b2b service nova] [instance: ee3609ea-0855-47c2-874c-349c80419781] Received event network-vif-deleted-ce79bad7-6bfd-4645-bc55-71dfc049411d {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1190.314697] env[69328]: DEBUG nova.compute.provider_tree [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1190.328793] env[69328]: DEBUG oslo_vmware.api [None req-e81b1919-256d-44b9-905b-12a016edcea4 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274272, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.448239] env[69328]: DEBUG oslo_vmware.api [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274273, 'name': ReconfigVM_Task, 'duration_secs': 0.512364} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.448523] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Reconfigured VM instance instance-00000078 to attach disk [datastore2] d19f6a2a-3a16-4031-8c20-143ccfd6f5f5/d19f6a2a-3a16-4031-8c20-143ccfd6f5f5.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1190.449501] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bbb5c901-97ab-4f12-bf60-8e2075d9b53f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.458202] env[69328]: DEBUG oslo_vmware.api [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Waiting for the task: (returnval){ [ 1190.458202] env[69328]: value = "task-3274274" [ 1190.458202] env[69328]: _type = "Task" [ 1190.458202] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.469263] env[69328]: DEBUG oslo_vmware.api [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274274, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.485177] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f42c2022-4f43-4464-98eb-24bfe7b47191 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Acquiring lock "refresh_cache-0c83f194-9346-4e24-a0ea-815d0b454ded" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1190.485372] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f42c2022-4f43-4464-98eb-24bfe7b47191 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Acquired lock "refresh_cache-0c83f194-9346-4e24-a0ea-815d0b454ded" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1190.485546] env[69328]: DEBUG nova.network.neutron [None req-f42c2022-4f43-4464-98eb-24bfe7b47191 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 0c83f194-9346-4e24-a0ea-815d0b454ded] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1190.603302] env[69328]: INFO nova.compute.manager [-] [instance: ee3609ea-0855-47c2-874c-349c80419781] Took 1.80 seconds to deallocate network for instance. 
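The oslo_concurrency.lockutils records above follow a fixed pattern: "Acquiring lock X by Y", then "Lock X acquired :: waited N s", then "Lock X 'released' :: held N s". The following is a minimal, self-contained sketch of that instrumentation pattern using plain threading and hypothetical helper names; it is an illustration of the logged behaviour, not the oslo.concurrency implementation.

import threading
import time
from contextlib import contextmanager

_LOCKS = {}                       # name -> threading.Lock (illustrative registry)
_REGISTRY_GUARD = threading.Lock()


def _get_lock(name):
    # One named lock per resource, e.g. "refresh_cache-<instance uuid>".
    with _REGISTRY_GUARD:
        return _LOCKS.setdefault(name, threading.Lock())


@contextmanager
def timed_lock(name, owner):
    """Acquire a named lock and report waited/held durations, mirroring the
    'acquired :: waited' / 'released :: held' lines in the log above."""
    lock = _get_lock(name)
    print(f'Acquiring lock "{name}" by "{owner}"')
    start = time.monotonic()
    lock.acquire()
    acquired = time.monotonic()
    print(f'Lock "{name}" acquired by "{owner}" :: waited {acquired - start:.3f}s')
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" "released" by "{owner}" :: held {time.monotonic() - acquired:.3f}s')


if __name__ == "__main__":
    with timed_lock("refresh_cache-0c83f194-9346-4e24-a0ea-815d0b454ded",
                    "example.build_network_info_cache"):
        time.sleep(0.05)  # stand-in for rebuilding the instance network info cache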
[ 1190.825325] env[69328]: DEBUG nova.scheduler.client.report [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1190.828745] env[69328]: DEBUG oslo_vmware.api [None req-e81b1919-256d-44b9-905b-12a016edcea4 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274272, 'name': ReconfigVM_Task, 'duration_secs': 0.574592} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.829655] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-e81b1919-256d-44b9-905b-12a016edcea4 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Reconfigured VM instance instance-00000073 to attach disk [datastore2] volume-7dec5135-3f2a-46ec-9d0d-3ae432471688/volume-7dec5135-3f2a-46ec-9d0d-3ae432471688.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1190.836376] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6e164ffb-e0fe-4428-a4d5-581b815c121a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.854296] env[69328]: DEBUG oslo_vmware.api [None req-e81b1919-256d-44b9-905b-12a016edcea4 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 1190.854296] env[69328]: value = "task-3274275" [ 1190.854296] env[69328]: _type = "Task" [ 1190.854296] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.865328] env[69328]: DEBUG oslo_vmware.api [None req-e81b1919-256d-44b9-905b-12a016edcea4 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274275, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.968093] env[69328]: DEBUG oslo_vmware.api [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274274, 'name': Rename_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.007151] env[69328]: DEBUG nova.network.neutron [None req-f42c2022-4f43-4464-98eb-24bfe7b47191 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 0c83f194-9346-4e24-a0ea-815d0b454ded] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1191.058593] env[69328]: DEBUG nova.network.neutron [None req-f42c2022-4f43-4464-98eb-24bfe7b47191 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 0c83f194-9346-4e24-a0ea-815d0b454ded] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1191.149475] env[69328]: INFO nova.compute.manager [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Took 0.55 seconds to detach 1 volumes for instance. [ 1191.152190] env[69328]: DEBUG nova.compute.manager [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: ee3609ea-0855-47c2-874c-349c80419781] Deleting volume: a2de811d-614f-4456-ac21-52535c9e5fd6 {{(pid=69328) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1191.335159] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.298s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1191.335823] env[69328]: DEBUG nova.compute.manager [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1191.339166] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c4ad89fe-709f-4021-866f-c80eb03ee6b4 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.582s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1191.339457] env[69328]: DEBUG nova.objects.instance [None req-c4ad89fe-709f-4021-866f-c80eb03ee6b4 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lazy-loading 'resources' on Instance uuid c751ef77-c3be-46cd-b7eb-fe139bf0998b {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1191.366261] env[69328]: DEBUG oslo_vmware.api [None req-e81b1919-256d-44b9-905b-12a016edcea4 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274275, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.372210] env[69328]: DEBUG nova.network.neutron [-] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1191.469742] env[69328]: DEBUG oslo_vmware.api [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274274, 'name': Rename_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.560429] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f42c2022-4f43-4464-98eb-24bfe7b47191 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Releasing lock "refresh_cache-0c83f194-9346-4e24-a0ea-815d0b454ded" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1191.561081] env[69328]: DEBUG nova.compute.manager [None req-f42c2022-4f43-4464-98eb-24bfe7b47191 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 0c83f194-9346-4e24-a0ea-815d0b454ded] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1191.561432] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f42c2022-4f43-4464-98eb-24bfe7b47191 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 0c83f194-9346-4e24-a0ea-815d0b454ded] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1191.562812] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be049705-4bca-4d77-b4b8-b03aa5d45c31 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.574179] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f42c2022-4f43-4464-98eb-24bfe7b47191 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 0c83f194-9346-4e24-a0ea-815d0b454ded] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1191.574575] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-75540392-e5f4-4585-adc5-97612bc99e03 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.584886] env[69328]: DEBUG oslo_vmware.api [None req-f42c2022-4f43-4464-98eb-24bfe7b47191 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Waiting for the task: (returnval){ [ 1191.584886] env[69328]: value = "task-3274277" [ 1191.584886] env[69328]: _type = "Task" [ 1191.584886] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.593901] env[69328]: DEBUG oslo_vmware.api [None req-f42c2022-4f43-4464-98eb-24bfe7b47191 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274277, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.696073] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1191.846435] env[69328]: DEBUG nova.compute.utils [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1191.848555] env[69328]: DEBUG nova.compute.manager [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1191.848555] env[69328]: DEBUG nova.network.neutron [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1191.867654] env[69328]: DEBUG oslo_vmware.api [None req-e81b1919-256d-44b9-905b-12a016edcea4 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274275, 'name': ReconfigVM_Task, 'duration_secs': 0.989806} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.870351] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-e81b1919-256d-44b9-905b-12a016edcea4 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653967', 'volume_id': '7dec5135-3f2a-46ec-9d0d-3ae432471688', 'name': 'volume-7dec5135-3f2a-46ec-9d0d-3ae432471688', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ff815ffb-3422-469e-9b54-b33502826513', 'attached_at': '', 'detached_at': '', 'volume_id': '7dec5135-3f2a-46ec-9d0d-3ae432471688', 'serial': '7dec5135-3f2a-46ec-9d0d-3ae432471688'} {{(pid=69328) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1191.876911] env[69328]: INFO nova.compute.manager [-] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Took 1.87 seconds to deallocate network for instance. 
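The "Attached VMDK: {...}" record directly above (and the matching "_detach_volume_vmdk" record later in this section) logs the vmdk-style connection_info dict. Below is an illustrative helper, not part of Nova, that pulls out the fields those records show; the field names come straight from the logged dict, the class and function names are hypothetical.

from typing import NamedTuple


class VmdkConnection(NamedTuple):
    volume_id: str
    backing_name: str   # e.g. "volume-7dec5135-3f2a-46ec-9d0d-3ae432471688"
    vm_ref: str         # e.g. "vm-653967" (the managed object holding the disk)
    access_mode: str    # "rw" or "ro"
    encrypted: bool


def parse_vmdk_connection_info(connection_info: dict) -> VmdkConnection:
    if connection_info.get("driver_volume_type") != "vmdk":
        raise ValueError("not a vmdk connection_info")
    data = connection_info["data"]
    return VmdkConnection(
        volume_id=data["volume_id"],
        backing_name=data["name"],
        vm_ref=data["volume"],
        access_mode=data.get("access_mode", "rw"),
        encrypted=bool(data.get("encrypted", False)),
    )


if __name__ == "__main__":
    # Values copied from the "Attached VMDK" record above.
    sample = {
        "driver_volume_type": "vmdk",
        "data": {
            "volume": "vm-653967",
            "volume_id": "7dec5135-3f2a-46ec-9d0d-3ae432471688",
            "name": "volume-7dec5135-3f2a-46ec-9d0d-3ae432471688",
            "access_mode": "rw",
            "encrypted": False,
        },
        "serial": "7dec5135-3f2a-46ec-9d0d-3ae432471688",
    }
    print(parse_vmdk_connection_info(sample))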
[ 1191.892176] env[69328]: DEBUG nova.policy [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c1d18e6b9e284403a091afd2c3e31c1c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f357b5a9494b4849a83aa934c5d4e26b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 1191.972454] env[69328]: DEBUG oslo_vmware.api [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274274, 'name': Rename_Task, 'duration_secs': 1.26056} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.976253] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1191.978202] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0945f038-28bc-431b-acdf-1dd8a3b2da53 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.984583] env[69328]: DEBUG oslo_vmware.api [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Waiting for the task: (returnval){ [ 1191.984583] env[69328]: value = "task-3274278" [ 1191.984583] env[69328]: _type = "Task" [ 1191.984583] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.995715] env[69328]: DEBUG oslo_vmware.api [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274278, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.051510] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc618f59-1212-47aa-95e3-34f54105b0c4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.060417] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e6ed8d4-6847-4d3e-bd5d-566c8e9f7cfb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.097449] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3866da1-c3d2-4909-8cbb-d9522e6c9423 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.106062] env[69328]: DEBUG oslo_vmware.api [None req-f42c2022-4f43-4464-98eb-24bfe7b47191 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274277, 'name': PowerOffVM_Task, 'duration_secs': 0.220459} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.108297] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-f42c2022-4f43-4464-98eb-24bfe7b47191 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 0c83f194-9346-4e24-a0ea-815d0b454ded] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1192.108478] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f42c2022-4f43-4464-98eb-24bfe7b47191 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 0c83f194-9346-4e24-a0ea-815d0b454ded] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1192.108902] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-977bd69b-d523-4ded-8d07-feee40e456d5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.111409] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fa3d803-1dce-4df2-9ab5-22099fe4cf0a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.126850] env[69328]: DEBUG nova.compute.provider_tree [None req-c4ad89fe-709f-4021-866f-c80eb03ee6b4 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1192.143741] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f42c2022-4f43-4464-98eb-24bfe7b47191 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 0c83f194-9346-4e24-a0ea-815d0b454ded] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1192.144059] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f42c2022-4f43-4464-98eb-24bfe7b47191 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 0c83f194-9346-4e24-a0ea-815d0b454ded] Deleting contents of the VM 
from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1192.144404] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-f42c2022-4f43-4464-98eb-24bfe7b47191 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Deleting the datastore file [datastore1] 0c83f194-9346-4e24-a0ea-815d0b454ded {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1192.144630] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5fac60fb-806c-4eb2-b077-71b6ee06687c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.151725] env[69328]: DEBUG oslo_vmware.api [None req-f42c2022-4f43-4464-98eb-24bfe7b47191 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Waiting for the task: (returnval){ [ 1192.151725] env[69328]: value = "task-3274280" [ 1192.151725] env[69328]: _type = "Task" [ 1192.151725] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.162462] env[69328]: DEBUG oslo_vmware.api [None req-f42c2022-4f43-4464-98eb-24bfe7b47191 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274280, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.177645] env[69328]: DEBUG oslo_vmware.rw_handles [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52916fc6-21d4-94e3-2b08-cf155555566d/disk-0.vmdk. {{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1192.179322] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a33c0f1-a663-4b6f-bcde-845874040577 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.187173] env[69328]: DEBUG oslo_vmware.rw_handles [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52916fc6-21d4-94e3-2b08-cf155555566d/disk-0.vmdk is in state: ready. {{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1192.187746] env[69328]: ERROR oslo_vmware.rw_handles [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52916fc6-21d4-94e3-2b08-cf155555566d/disk-0.vmdk due to incomplete transfer. 
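Throughout this section the oslo_vmware.api records alternate between "Task: {'id': ..., 'name': ...} progress is N%." and "... completed successfully." while a vCenter task is being waited on. The loop below is a generic polling sketch in that spirit; fetch_task_info is a hypothetical callable returning (state, progress, error) and is not an oslo.vmware API.

import time


class TaskFailed(Exception):
    pass


def wait_for_task(task_id, fetch_task_info, interval=0.5, timeout=300.0):
    """Poll a task until it succeeds, fails, or the timeout expires."""
    deadline = time.monotonic() + timeout
    while True:
        state, progress, error = fetch_task_info(task_id)
        if state == "success":
            print(f"Task {task_id} completed successfully.")
            return
        if state == "error":
            raise TaskFailed(f"Task {task_id} failed: {error}")
        print(f"Task {task_id} progress is {progress}%.")
        if time.monotonic() >= deadline:
            raise TimeoutError(f"Task {task_id} did not finish in {timeout}s")
        time.sleep(interval)


if __name__ == "__main__":
    # Fake backend that reports 0% -> 66% -> success, like the PowerOnVM task above.
    states = iter([("running", 0, None), ("running", 66, None), ("success", 100, None)])
    wait_for_task("task-3274278", lambda _id: next(states), interval=0.01)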
[ 1192.187746] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-eece307f-f96b-44a9-842b-c98ba7c267b2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.196888] env[69328]: DEBUG oslo_vmware.rw_handles [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52916fc6-21d4-94e3-2b08-cf155555566d/disk-0.vmdk. {{(pid=69328) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1192.197111] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Uploaded image 025a677f-beea-4695-85ea-28c156879ab9 to the Glance image server {{(pid=69328) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1192.199460] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Destroying the VM {{(pid=69328) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1192.199726] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-353a4d1b-171a-4f0d-98b0-9684bd328e48 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.207660] env[69328]: DEBUG oslo_vmware.api [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 1192.207660] env[69328]: value = "task-3274281" [ 1192.207660] env[69328]: _type = "Task" [ 1192.207660] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.217905] env[69328]: DEBUG oslo_vmware.api [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274281, 'name': Destroy_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.236848] env[69328]: DEBUG nova.network.neutron [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Successfully created port: 119833be-9532-4d57-aece-6b3a83d11e9f {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1192.351494] env[69328]: DEBUG nova.compute.manager [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Start building block device mappings for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1192.364264] env[69328]: DEBUG nova.compute.manager [req-35f8d5e5-f501-4ef8-ae66-38bc9bee44b5 req-35ac4752-f1af-4071-9af1-cf25636f5056 service nova] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Received event network-vif-deleted-509b2377-84e7-48a6-b2ed-811f288cc65c {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1192.386122] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cfa89dcb-c141-458f-aaed-81b1f0a1ca0d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1192.496632] env[69328]: DEBUG oslo_vmware.api [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274278, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.631055] env[69328]: DEBUG nova.scheduler.client.report [None req-c4ad89fe-709f-4021-866f-c80eb03ee6b4 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1192.660859] env[69328]: DEBUG oslo_vmware.api [None req-f42c2022-4f43-4464-98eb-24bfe7b47191 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Task: {'id': task-3274280, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.103685} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.661125] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-f42c2022-4f43-4464-98eb-24bfe7b47191 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1192.661317] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f42c2022-4f43-4464-98eb-24bfe7b47191 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 0c83f194-9346-4e24-a0ea-815d0b454ded] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1192.661493] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-f42c2022-4f43-4464-98eb-24bfe7b47191 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 0c83f194-9346-4e24-a0ea-815d0b454ded] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1192.661664] env[69328]: INFO nova.compute.manager [None req-f42c2022-4f43-4464-98eb-24bfe7b47191 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] [instance: 0c83f194-9346-4e24-a0ea-815d0b454ded] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1192.661897] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f42c2022-4f43-4464-98eb-24bfe7b47191 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1192.662098] env[69328]: DEBUG nova.compute.manager [-] [instance: 0c83f194-9346-4e24-a0ea-815d0b454ded] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1192.662196] env[69328]: DEBUG nova.network.neutron [-] [instance: 0c83f194-9346-4e24-a0ea-815d0b454ded] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1192.676879] env[69328]: DEBUG nova.network.neutron [-] [instance: 0c83f194-9346-4e24-a0ea-815d0b454ded] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1192.716641] env[69328]: DEBUG oslo_vmware.api [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274281, 'name': Destroy_Task, 'duration_secs': 0.370714} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.716900] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Destroyed the VM [ 1192.717166] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Deleting Snapshot of the VM instance {{(pid=69328) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1192.717425] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-5c45e10c-8123-4dfa-ace6-c6355792190a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.724531] env[69328]: DEBUG oslo_vmware.api [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 1192.724531] env[69328]: value = "task-3274282" [ 1192.724531] env[69328]: _type = "Task" [ 1192.724531] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.734352] env[69328]: DEBUG oslo_vmware.api [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274282, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.909051] env[69328]: DEBUG nova.objects.instance [None req-e81b1919-256d-44b9-905b-12a016edcea4 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lazy-loading 'flavor' on Instance uuid ff815ffb-3422-469e-9b54-b33502826513 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1192.996050] env[69328]: DEBUG oslo_vmware.api [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274278, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.136024] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c4ad89fe-709f-4021-866f-c80eb03ee6b4 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.797s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1193.138814] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.443s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.138814] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1193.140483] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cfa89dcb-c141-458f-aaed-81b1f0a1ca0d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.755s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.140716] env[69328]: DEBUG nova.objects.instance [None req-cfa89dcb-c141-458f-aaed-81b1f0a1ca0d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lazy-loading 'resources' on Instance uuid 33583ef3-252c-45d4-a514-5646f98c5f45 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1193.162327] env[69328]: INFO nova.scheduler.client.report [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Deleted allocations for instance ee3609ea-0855-47c2-874c-349c80419781 [ 1193.171230] env[69328]: INFO nova.scheduler.client.report [None req-c4ad89fe-709f-4021-866f-c80eb03ee6b4 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Deleted allocations for instance c751ef77-c3be-46cd-b7eb-fe139bf0998b [ 1193.179407] env[69328]: DEBUG nova.network.neutron [-] [instance: 0c83f194-9346-4e24-a0ea-815d0b454ded] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1193.235075] env[69328]: DEBUG oslo_vmware.api [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274282, 'name': RemoveSnapshot_Task, 'duration_secs': 0.367957} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.235376] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Deleted Snapshot of the VM instance {{(pid=69328) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1193.235679] env[69328]: DEBUG nova.compute.manager [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1193.236477] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ffab4fe-c627-4508-90f6-40c33d12aa0f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.360985] env[69328]: DEBUG nova.compute.manager [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1193.389699] env[69328]: DEBUG nova.virt.hardware [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1193.389965] env[69328]: DEBUG nova.virt.hardware [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1193.390153] env[69328]: DEBUG nova.virt.hardware [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1193.390344] env[69328]: DEBUG nova.virt.hardware [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1193.390490] env[69328]: DEBUG nova.virt.hardware [None 
req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1193.390639] env[69328]: DEBUG nova.virt.hardware [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1193.390849] env[69328]: DEBUG nova.virt.hardware [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1193.391015] env[69328]: DEBUG nova.virt.hardware [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1193.391196] env[69328]: DEBUG nova.virt.hardware [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1193.391357] env[69328]: DEBUG nova.virt.hardware [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1193.391525] env[69328]: DEBUG nova.virt.hardware [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1193.392402] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-750d589f-1721-496e-97e5-b7b9adbaf696 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.401101] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42532afc-4d6c-4a35-b1df-1261d9d0df39 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.416045] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e81b1919-256d-44b9-905b-12a016edcea4 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "ff815ffb-3422-469e-9b54-b33502826513" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.323s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1193.497957] env[69328]: DEBUG oslo_vmware.api [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 
tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274278, 'name': PowerOnVM_Task, 'duration_secs': 1.278947} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.498207] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1193.498401] env[69328]: INFO nova.compute.manager [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Took 8.76 seconds to spawn the instance on the hypervisor. [ 1193.498577] env[69328]: DEBUG nova.compute.manager [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1193.499348] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feedca2f-8ca3-457b-bb30-89e6bae46497 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.665029] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d6b790fd-2918-4359-be8c-3ef05ca97c74 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "ff815ffb-3422-469e-9b54-b33502826513" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.665029] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d6b790fd-2918-4359-be8c-3ef05ca97c74 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "ff815ffb-3422-469e-9b54-b33502826513" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.672096] env[69328]: DEBUG oslo_concurrency.lockutils [None req-4133dd10-a312-4913-9cb5-781c38c58742 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "ee3609ea-0855-47c2-874c-349c80419781" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.665s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1193.687285] env[69328]: INFO nova.compute.manager [-] [instance: 0c83f194-9346-4e24-a0ea-815d0b454ded] Took 1.02 seconds to deallocate network for instance. 
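The nova.virt.hardware records above walk through CPU topology selection for the m1.nano flavor: 1 vCPU, limits of 65536 sockets/cores/threads, no preference, yielding the single possible topology (1, 1, 1). The function below is a simplified worked example of that enumeration, not Nova's actual _get_possible_cpu_topologies: it lists (sockets, cores, threads) triples whose product equals the vCPU count within the logged limits.

from itertools import product


def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate (sockets, cores, threads) triples that exactly cover `vcpus`."""
    topologies = []
    for sockets, cores, threads in product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            topologies.append((sockets, cores, threads))
    return topologies


if __name__ == "__main__":
    # Matches the log: 1 vCPU under 65536/65536/65536 limits -> [(1, 1, 1)]
    print(possible_cpu_topologies(1))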
[ 1193.687285] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c4ad89fe-709f-4021-866f-c80eb03ee6b4 tempest-ServerRescueNegativeTestJSON-2009991415 tempest-ServerRescueNegativeTestJSON-2009991415-project-member] Lock "c751ef77-c3be-46cd-b7eb-fe139bf0998b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.377s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1193.698386] env[69328]: DEBUG nova.network.neutron [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Successfully updated port: 119833be-9532-4d57-aece-6b3a83d11e9f {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1193.755412] env[69328]: INFO nova.compute.manager [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Shelve offloading [ 1193.812952] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc259f47-fa00-4095-9560-8ccb790bae59 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.820822] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4847ca18-83d1-4d9e-89db-2ce46d1446e2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.852505] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82b6a68b-bba5-4060-89c7-b940be3f80e6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.861596] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ed9f24e-4060-4c42-a499-3485b575a7b2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.872832] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a548a7ab-1d45-4fa7-8ea7-ad0d9144a44d tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "c1829dcf-3608-4955-bd50-eb9ee27d38e1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.873161] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a548a7ab-1d45-4fa7-8ea7-ad0d9144a44d tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "c1829dcf-3608-4955-bd50-eb9ee27d38e1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.873416] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a548a7ab-1d45-4fa7-8ea7-ad0d9144a44d tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "c1829dcf-3608-4955-bd50-eb9ee27d38e1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.873639] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a548a7ab-1d45-4fa7-8ea7-ad0d9144a44d tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "c1829dcf-3608-4955-bd50-eb9ee27d38e1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.873847] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a548a7ab-1d45-4fa7-8ea7-ad0d9144a44d tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "c1829dcf-3608-4955-bd50-eb9ee27d38e1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1193.875881] env[69328]: INFO nova.compute.manager [None req-a548a7ab-1d45-4fa7-8ea7-ad0d9144a44d tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Terminating instance [ 1193.878722] env[69328]: DEBUG nova.compute.provider_tree [None req-cfa89dcb-c141-458f-aaed-81b1f0a1ca0d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1194.016492] env[69328]: INFO nova.compute.manager [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Took 16.05 seconds to build instance. 
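The scheduler report records above repeatedly log the provider inventory for 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e (VCPU total 48 at a 4.0 allocation ratio, MEMORY_MB total 196590 with 512 reserved, DISK_GB total 400). The snippet below does rough capacity math over that dict using the common placement convention (total - reserved) * allocation_ratio; treat the formula as an assumption for illustration, not a statement about this deployment's exact scheduling behaviour.

# Inventory values copied from the "Inventory has not changed for provider" records above.
INVENTORY = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}


def schedulable_capacity(inventory):
    """Return schedulable units per resource class: (total - reserved) * allocation_ratio."""
    return {
        rc: (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        for rc, inv in inventory.items()
    }


if __name__ == "__main__":
    # VCPU: (48 - 0) * 4.0 = 192.0
    # MEMORY_MB: (196590 - 512) * 1.0 = 196078.0
    # DISK_GB: (400 - 0) * 1.0 = 400.0
    print(schedulable_capacity(INVENTORY))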
[ 1194.168187] env[69328]: INFO nova.compute.manager [None req-d6b790fd-2918-4359-be8c-3ef05ca97c74 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Detaching volume 7dec5135-3f2a-46ec-9d0d-3ae432471688 [ 1194.202377] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "refresh_cache-566c3167-4cf2-4236-812f-dfbf30bbaf6f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1194.202905] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquired lock "refresh_cache-566c3167-4cf2-4236-812f-dfbf30bbaf6f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1194.203156] env[69328]: DEBUG nova.network.neutron [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1194.208623] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f42c2022-4f43-4464-98eb-24bfe7b47191 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1194.222142] env[69328]: INFO nova.virt.block_device [None req-d6b790fd-2918-4359-be8c-3ef05ca97c74 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Attempting to driver detach volume 7dec5135-3f2a-46ec-9d0d-3ae432471688 from mountpoint /dev/sdb [ 1194.223359] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6b790fd-2918-4359-be8c-3ef05ca97c74 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Volume detach. 
Driver type: vmdk {{(pid=69328) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1194.223629] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6b790fd-2918-4359-be8c-3ef05ca97c74 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653967', 'volume_id': '7dec5135-3f2a-46ec-9d0d-3ae432471688', 'name': 'volume-7dec5135-3f2a-46ec-9d0d-3ae432471688', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ff815ffb-3422-469e-9b54-b33502826513', 'attached_at': '', 'detached_at': '', 'volume_id': '7dec5135-3f2a-46ec-9d0d-3ae432471688', 'serial': '7dec5135-3f2a-46ec-9d0d-3ae432471688'} {{(pid=69328) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1194.227236] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7d0a02d-1d81-4b3e-84d8-9144054736aa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.250365] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36f9d57f-3944-4ff4-88c8-f10b2058ab86 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.257635] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ecaa9c2-5be9-42a6-9eb3-feda1a79638f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.260201] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1194.260423] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9f836846-2c46-4ba5-b094-00d6b9cf61da {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.282531] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26248be3-b701-454a-8002-cfb79d3c7973 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.285061] env[69328]: DEBUG oslo_vmware.api [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 1194.285061] env[69328]: value = "task-3274283" [ 1194.285061] env[69328]: _type = "Task" [ 1194.285061] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.298488] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6b790fd-2918-4359-be8c-3ef05ca97c74 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] The volume has not been displaced from its original location: [datastore2] volume-7dec5135-3f2a-46ec-9d0d-3ae432471688/volume-7dec5135-3f2a-46ec-9d0d-3ae432471688.vmdk. No consolidation needed. {{(pid=69328) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1194.303619] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6b790fd-2918-4359-be8c-3ef05ca97c74 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Reconfiguring VM instance instance-00000073 to detach disk 2001 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1194.304293] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9128487b-4f58-484d-a085-5e2d808e233a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.320659] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] VM already powered off {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1194.320923] env[69328]: DEBUG nova.compute.manager [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1194.321682] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3839bd79-2011-454b-ba53-ce60a336d2b6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.325185] env[69328]: DEBUG oslo_vmware.api [None req-d6b790fd-2918-4359-be8c-3ef05ca97c74 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 1194.325185] env[69328]: value = "task-3274284" [ 1194.325185] env[69328]: _type = "Task" [ 1194.325185] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.330267] env[69328]: DEBUG oslo_concurrency.lockutils [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquiring lock "refresh_cache-ae46c18e-15ae-4a47-b05a-a143f10b5ab6" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1194.330511] env[69328]: DEBUG oslo_concurrency.lockutils [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquired lock "refresh_cache-ae46c18e-15ae-4a47-b05a-a143f10b5ab6" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1194.330698] env[69328]: DEBUG nova.network.neutron [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1194.338341] env[69328]: DEBUG oslo_vmware.api [None req-d6b790fd-2918-4359-be8c-3ef05ca97c74 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274284, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.379457] env[69328]: DEBUG nova.compute.manager [None req-a548a7ab-1d45-4fa7-8ea7-ad0d9144a44d tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1194.379749] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a548a7ab-1d45-4fa7-8ea7-ad0d9144a44d tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1194.383552] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60914d2d-4030-478b-bbf3-47d25e5c2491 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.384862] env[69328]: DEBUG nova.scheduler.client.report [None req-cfa89dcb-c141-458f-aaed-81b1f0a1ca0d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1194.393330] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a548a7ab-1d45-4fa7-8ea7-ad0d9144a44d tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1194.393610] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1f0f823c-e90b-4855-a81e-abd3e6048dcd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.398728] env[69328]: DEBUG nova.compute.manager [req-180967c7-d844-42a6-b7c0-ea3be47e7842 req-9ac95acd-0e67-42e6-a326-d16e72b8a9e2 service nova] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Received event network-vif-plugged-119833be-9532-4d57-aece-6b3a83d11e9f {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1194.398939] env[69328]: DEBUG oslo_concurrency.lockutils [req-180967c7-d844-42a6-b7c0-ea3be47e7842 req-9ac95acd-0e67-42e6-a326-d16e72b8a9e2 service nova] Acquiring lock "566c3167-4cf2-4236-812f-dfbf30bbaf6f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1194.399161] env[69328]: DEBUG oslo_concurrency.lockutils [req-180967c7-d844-42a6-b7c0-ea3be47e7842 req-9ac95acd-0e67-42e6-a326-d16e72b8a9e2 service nova] Lock "566c3167-4cf2-4236-812f-dfbf30bbaf6f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1194.399322] env[69328]: DEBUG oslo_concurrency.lockutils [req-180967c7-d844-42a6-b7c0-ea3be47e7842 req-9ac95acd-0e67-42e6-a326-d16e72b8a9e2 service nova] Lock "566c3167-4cf2-4236-812f-dfbf30bbaf6f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 
0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1194.399482] env[69328]: DEBUG nova.compute.manager [req-180967c7-d844-42a6-b7c0-ea3be47e7842 req-9ac95acd-0e67-42e6-a326-d16e72b8a9e2 service nova] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] No waiting events found dispatching network-vif-plugged-119833be-9532-4d57-aece-6b3a83d11e9f {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1194.399643] env[69328]: WARNING nova.compute.manager [req-180967c7-d844-42a6-b7c0-ea3be47e7842 req-9ac95acd-0e67-42e6-a326-d16e72b8a9e2 service nova] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Received unexpected event network-vif-plugged-119833be-9532-4d57-aece-6b3a83d11e9f for instance with vm_state building and task_state spawning. [ 1194.399798] env[69328]: DEBUG nova.compute.manager [req-180967c7-d844-42a6-b7c0-ea3be47e7842 req-9ac95acd-0e67-42e6-a326-d16e72b8a9e2 service nova] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Received event network-changed-119833be-9532-4d57-aece-6b3a83d11e9f {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1194.399946] env[69328]: DEBUG nova.compute.manager [req-180967c7-d844-42a6-b7c0-ea3be47e7842 req-9ac95acd-0e67-42e6-a326-d16e72b8a9e2 service nova] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Refreshing instance network info cache due to event network-changed-119833be-9532-4d57-aece-6b3a83d11e9f. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1194.400126] env[69328]: DEBUG oslo_concurrency.lockutils [req-180967c7-d844-42a6-b7c0-ea3be47e7842 req-9ac95acd-0e67-42e6-a326-d16e72b8a9e2 service nova] Acquiring lock "refresh_cache-566c3167-4cf2-4236-812f-dfbf30bbaf6f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1194.402113] env[69328]: DEBUG oslo_vmware.api [None req-a548a7ab-1d45-4fa7-8ea7-ad0d9144a44d tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 1194.402113] env[69328]: value = "task-3274285" [ 1194.402113] env[69328]: _type = "Task" [ 1194.402113] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.413691] env[69328]: DEBUG oslo_vmware.api [None req-a548a7ab-1d45-4fa7-8ea7-ad0d9144a44d tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274285, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.519315] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c667ae25-7267-4ba2-b798-c0018d3ddcf0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lock "d19f6a2a-3a16-4031-8c20-143ccfd6f5f5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.566s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1194.747815] env[69328]: DEBUG nova.network.neutron [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1194.837609] env[69328]: DEBUG oslo_vmware.api [None req-d6b790fd-2918-4359-be8c-3ef05ca97c74 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274284, 'name': ReconfigVM_Task, 'duration_secs': 0.345468} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.838365] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6b790fd-2918-4359-be8c-3ef05ca97c74 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Reconfigured VM instance instance-00000073 to detach disk 2001 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1194.842750] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a429a974-353a-4da0-b4b2-f100f0e226da {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.859926] env[69328]: DEBUG oslo_vmware.api [None req-d6b790fd-2918-4359-be8c-3ef05ca97c74 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 1194.859926] env[69328]: value = "task-3274286" [ 1194.859926] env[69328]: _type = "Task" [ 1194.859926] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.868132] env[69328]: DEBUG oslo_vmware.api [None req-d6b790fd-2918-4359-be8c-3ef05ca97c74 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274286, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.892667] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cfa89dcb-c141-458f-aaed-81b1f0a1ca0d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.752s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1194.895585] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f42c2022-4f43-4464-98eb-24bfe7b47191 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.688s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1194.895803] env[69328]: DEBUG nova.objects.instance [None req-f42c2022-4f43-4464-98eb-24bfe7b47191 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Lazy-loading 'resources' on Instance uuid 0c83f194-9346-4e24-a0ea-815d0b454ded {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1194.912422] env[69328]: DEBUG oslo_vmware.api [None req-a548a7ab-1d45-4fa7-8ea7-ad0d9144a44d tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274285, 'name': PowerOffVM_Task, 'duration_secs': 0.242412} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.913184] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a548a7ab-1d45-4fa7-8ea7-ad0d9144a44d tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1194.913184] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a548a7ab-1d45-4fa7-8ea7-ad0d9144a44d tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1194.913184] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aff2d652-48d1-4935-b031-c303d1d20a62 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.920989] env[69328]: INFO nova.scheduler.client.report [None req-cfa89dcb-c141-458f-aaed-81b1f0a1ca0d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Deleted allocations for instance 33583ef3-252c-45d4-a514-5646f98c5f45 [ 1194.935020] env[69328]: DEBUG nova.network.neutron [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Updating instance_info_cache with network_info: [{"id": "119833be-9532-4d57-aece-6b3a83d11e9f", "address": "fa:16:3e:05:b2:63", "network": {"id": "4031def8-0553-461f-9528-aa338b9d7b2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1834835647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f357b5a9494b4849a83aa934c5d4e26b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "357d2811-e990-4985-9f9e-b158d10d3699", "external-id": "nsx-vlan-transportzone-641", "segmentation_id": 641, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap119833be-95", "ovs_interfaceid": "119833be-9532-4d57-aece-6b3a83d11e9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1194.989023] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a548a7ab-1d45-4fa7-8ea7-ad0d9144a44d tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1194.989147] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a548a7ab-1d45-4fa7-8ea7-ad0d9144a44d tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] 
Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1194.989316] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-a548a7ab-1d45-4fa7-8ea7-ad0d9144a44d tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Deleting the datastore file [datastore2] c1829dcf-3608-4955-bd50-eb9ee27d38e1 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1194.989566] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2d73f11d-d4a1-44a0-ab9d-cf0cf382682e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.996735] env[69328]: DEBUG oslo_vmware.api [None req-a548a7ab-1d45-4fa7-8ea7-ad0d9144a44d tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 1194.996735] env[69328]: value = "task-3274288" [ 1194.996735] env[69328]: _type = "Task" [ 1194.996735] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.005608] env[69328]: DEBUG oslo_vmware.api [None req-a548a7ab-1d45-4fa7-8ea7-ad0d9144a44d tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274288, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.132238] env[69328]: DEBUG nova.network.neutron [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Updating instance_info_cache with network_info: [{"id": "19978029-822a-48e0-b3c1-9d885b82a5f3", "address": "fa:16:3e:99:f9:c3", "network": {"id": "3be6b159-5d5c-4752-b79d-0ed6110569d0", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-915151552-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.195", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f50ac50ef6ae4abc83a8064746de7029", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19978029-82", "ovs_interfaceid": "19978029-822a-48e0-b3c1-9d885b82a5f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1195.369722] env[69328]: DEBUG oslo_vmware.api [None req-d6b790fd-2918-4359-be8c-3ef05ca97c74 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274286, 'name': ReconfigVM_Task, 'duration_secs': 0.18152} 
completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.370150] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6b790fd-2918-4359-be8c-3ef05ca97c74 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653967', 'volume_id': '7dec5135-3f2a-46ec-9d0d-3ae432471688', 'name': 'volume-7dec5135-3f2a-46ec-9d0d-3ae432471688', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ff815ffb-3422-469e-9b54-b33502826513', 'attached_at': '', 'detached_at': '', 'volume_id': '7dec5135-3f2a-46ec-9d0d-3ae432471688', 'serial': '7dec5135-3f2a-46ec-9d0d-3ae432471688'} {{(pid=69328) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1195.431775] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cfa89dcb-c141-458f-aaed-81b1f0a1ca0d tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "33583ef3-252c-45d4-a514-5646f98c5f45" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.059s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1195.439478] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Releasing lock "refresh_cache-566c3167-4cf2-4236-812f-dfbf30bbaf6f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1195.439769] env[69328]: DEBUG nova.compute.manager [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Instance network_info: |[{"id": "119833be-9532-4d57-aece-6b3a83d11e9f", "address": "fa:16:3e:05:b2:63", "network": {"id": "4031def8-0553-461f-9528-aa338b9d7b2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1834835647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f357b5a9494b4849a83aa934c5d4e26b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "357d2811-e990-4985-9f9e-b158d10d3699", "external-id": "nsx-vlan-transportzone-641", "segmentation_id": 641, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap119833be-95", "ovs_interfaceid": "119833be-9532-4d57-aece-6b3a83d11e9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1195.442966] env[69328]: DEBUG oslo_concurrency.lockutils [req-180967c7-d844-42a6-b7c0-ea3be47e7842 req-9ac95acd-0e67-42e6-a326-d16e72b8a9e2 service nova] 
Acquired lock "refresh_cache-566c3167-4cf2-4236-812f-dfbf30bbaf6f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1195.443068] env[69328]: DEBUG nova.network.neutron [req-180967c7-d844-42a6-b7c0-ea3be47e7842 req-9ac95acd-0e67-42e6-a326-d16e72b8a9e2 service nova] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Refreshing network info cache for port 119833be-9532-4d57-aece-6b3a83d11e9f {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1195.444193] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:05:b2:63', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '357d2811-e990-4985-9f9e-b158d10d3699', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '119833be-9532-4d57-aece-6b3a83d11e9f', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1195.451685] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1195.455524] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1195.456408] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a9ce5632-ec8d-47c3-9ad2-06a5efe34844 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.480044] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1195.480044] env[69328]: value = "task-3274289" [ 1195.480044] env[69328]: _type = "Task" [ 1195.480044] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.490557] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274289, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.510442] env[69328]: DEBUG oslo_vmware.api [None req-a548a7ab-1d45-4fa7-8ea7-ad0d9144a44d tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274288, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167725} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.512986] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-a548a7ab-1d45-4fa7-8ea7-ad0d9144a44d tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1195.513225] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a548a7ab-1d45-4fa7-8ea7-ad0d9144a44d tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1195.513420] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a548a7ab-1d45-4fa7-8ea7-ad0d9144a44d tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1195.513606] env[69328]: INFO nova.compute.manager [None req-a548a7ab-1d45-4fa7-8ea7-ad0d9144a44d tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1195.513865] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a548a7ab-1d45-4fa7-8ea7-ad0d9144a44d tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1195.514274] env[69328]: DEBUG nova.compute.manager [-] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1195.514375] env[69328]: DEBUG nova.network.neutron [-] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1195.546478] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5bc365e-a510-4b34-9acc-1d9a9e3b0039 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.555727] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dcd6e8b-92bd-48e7-bfe6-11ac3380d87c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.589187] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5be2f89a-d822-4cfa-aded-b4ae7ce06e79 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.598090] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d1a04b5-f7fd-4df6-9cd5-58f535dbb991 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.610604] env[69328]: DEBUG nova.compute.provider_tree [None req-f42c2022-4f43-4464-98eb-24bfe7b47191 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1195.635733] env[69328]: DEBUG oslo_concurrency.lockutils [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Releasing lock "refresh_cache-ae46c18e-15ae-4a47-b05a-a143f10b5ab6" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1195.826043] env[69328]: DEBUG nova.network.neutron [req-180967c7-d844-42a6-b7c0-ea3be47e7842 req-9ac95acd-0e67-42e6-a326-d16e72b8a9e2 service nova] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Updated VIF entry in instance network info cache for port 119833be-9532-4d57-aece-6b3a83d11e9f. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1195.826483] env[69328]: DEBUG nova.network.neutron [req-180967c7-d844-42a6-b7c0-ea3be47e7842 req-9ac95acd-0e67-42e6-a326-d16e72b8a9e2 service nova] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Updating instance_info_cache with network_info: [{"id": "119833be-9532-4d57-aece-6b3a83d11e9f", "address": "fa:16:3e:05:b2:63", "network": {"id": "4031def8-0553-461f-9528-aa338b9d7b2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1834835647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f357b5a9494b4849a83aa934c5d4e26b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "357d2811-e990-4985-9f9e-b158d10d3699", "external-id": "nsx-vlan-transportzone-641", "segmentation_id": 641, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap119833be-95", "ovs_interfaceid": "119833be-9532-4d57-aece-6b3a83d11e9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1195.909168] env[69328]: DEBUG nova.compute.manager [req-b4936792-5687-4954-b366-b896fecc144b req-9d0f9815-5f3f-4cc3-993e-ecf8e1bbb87d service nova] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Received event network-vif-unplugged-19978029-822a-48e0-b3c1-9d885b82a5f3 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1195.909386] env[69328]: DEBUG oslo_concurrency.lockutils [req-b4936792-5687-4954-b366-b896fecc144b req-9d0f9815-5f3f-4cc3-993e-ecf8e1bbb87d service nova] Acquiring lock "ae46c18e-15ae-4a47-b05a-a143f10b5ab6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1195.909716] env[69328]: DEBUG oslo_concurrency.lockutils [req-b4936792-5687-4954-b366-b896fecc144b req-9d0f9815-5f3f-4cc3-993e-ecf8e1bbb87d service nova] Lock "ae46c18e-15ae-4a47-b05a-a143f10b5ab6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1195.909840] env[69328]: DEBUG oslo_concurrency.lockutils [req-b4936792-5687-4954-b366-b896fecc144b req-9d0f9815-5f3f-4cc3-993e-ecf8e1bbb87d service nova] Lock "ae46c18e-15ae-4a47-b05a-a143f10b5ab6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1195.909911] env[69328]: DEBUG nova.compute.manager [req-b4936792-5687-4954-b366-b896fecc144b req-9d0f9815-5f3f-4cc3-993e-ecf8e1bbb87d service nova] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] No waiting events found dispatching network-vif-unplugged-19978029-822a-48e0-b3c1-9d885b82a5f3 {{(pid=69328) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 1195.910536] env[69328]: WARNING nova.compute.manager [req-b4936792-5687-4954-b366-b896fecc144b req-9d0f9815-5f3f-4cc3-993e-ecf8e1bbb87d service nova] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Received unexpected event network-vif-unplugged-19978029-822a-48e0-b3c1-9d885b82a5f3 for instance with vm_state shelved and task_state shelving_offloading. [ 1195.925210] env[69328]: DEBUG nova.objects.instance [None req-d6b790fd-2918-4359-be8c-3ef05ca97c74 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lazy-loading 'flavor' on Instance uuid ff815ffb-3422-469e-9b54-b33502826513 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1195.996948] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274289, 'name': CreateVM_Task, 'duration_secs': 0.469803} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.996948] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1196.002478] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1196.002766] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1196.003202] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1196.004090] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2f25d8b-e61c-4959-835f-361ae8215901 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.009731] env[69328]: DEBUG oslo_vmware.api [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1196.009731] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]521a07f4-6e48-be8a-745e-df0dca977de7" [ 1196.009731] env[69328]: _type = "Task" [ 1196.009731] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.018662] env[69328]: DEBUG oslo_vmware.api [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]521a07f4-6e48-be8a-745e-df0dca977de7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.020249] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1196.021405] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7587cf9-c074-4dea-87da-3345b34d47c1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.028087] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1196.028350] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4ea186cc-061f-4e0b-ac2b-bc89d2ec2d99 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.104759] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1196.105129] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1196.105401] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Deleting the datastore file [datastore1] ae46c18e-15ae-4a47-b05a-a143f10b5ab6 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1196.105759] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-064a17d6-c16c-4b28-b964-e82e8b31419f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.112368] env[69328]: DEBUG oslo_vmware.api [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 1196.112368] env[69328]: value = "task-3274291" [ 1196.112368] env[69328]: _type = "Task" [ 
1196.112368] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.113460] env[69328]: DEBUG nova.scheduler.client.report [None req-f42c2022-4f43-4464-98eb-24bfe7b47191 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1196.127882] env[69328]: DEBUG oslo_vmware.api [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274291, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.329888] env[69328]: DEBUG oslo_concurrency.lockutils [req-180967c7-d844-42a6-b7c0-ea3be47e7842 req-9ac95acd-0e67-42e6-a326-d16e72b8a9e2 service nova] Releasing lock "refresh_cache-566c3167-4cf2-4236-812f-dfbf30bbaf6f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1196.372602] env[69328]: DEBUG nova.network.neutron [-] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1196.436167] env[69328]: DEBUG nova.compute.manager [req-7c49c80e-1b76-4be8-8112-5e76e661450c req-1a9eda6b-3b23-47f2-84ba-da940eca4977 service nova] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Received event network-changed-69e73394-845a-4108-8b2f-6b23a000d98c {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1196.436595] env[69328]: DEBUG nova.compute.manager [req-7c49c80e-1b76-4be8-8112-5e76e661450c req-1a9eda6b-3b23-47f2-84ba-da940eca4977 service nova] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Refreshing instance network info cache due to event network-changed-69e73394-845a-4108-8b2f-6b23a000d98c. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1196.436966] env[69328]: DEBUG oslo_concurrency.lockutils [req-7c49c80e-1b76-4be8-8112-5e76e661450c req-1a9eda6b-3b23-47f2-84ba-da940eca4977 service nova] Acquiring lock "refresh_cache-d19f6a2a-3a16-4031-8c20-143ccfd6f5f5" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1196.437244] env[69328]: DEBUG oslo_concurrency.lockutils [req-7c49c80e-1b76-4be8-8112-5e76e661450c req-1a9eda6b-3b23-47f2-84ba-da940eca4977 service nova] Acquired lock "refresh_cache-d19f6a2a-3a16-4031-8c20-143ccfd6f5f5" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1196.437521] env[69328]: DEBUG nova.network.neutron [req-7c49c80e-1b76-4be8-8112-5e76e661450c req-1a9eda6b-3b23-47f2-84ba-da940eca4977 service nova] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Refreshing network info cache for port 69e73394-845a-4108-8b2f-6b23a000d98c {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1196.467413] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8db9866c-31f1-4c77-930f-e530c54fc1eb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "de8e6616-0460-4a6e-918c-a27818da96e2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1196.467638] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8db9866c-31f1-4c77-930f-e530c54fc1eb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "de8e6616-0460-4a6e-918c-a27818da96e2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1196.467848] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8db9866c-31f1-4c77-930f-e530c54fc1eb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "de8e6616-0460-4a6e-918c-a27818da96e2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1196.468040] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8db9866c-31f1-4c77-930f-e530c54fc1eb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "de8e6616-0460-4a6e-918c-a27818da96e2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1196.468218] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8db9866c-31f1-4c77-930f-e530c54fc1eb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "de8e6616-0460-4a6e-918c-a27818da96e2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1196.470375] env[69328]: INFO nova.compute.manager [None req-8db9866c-31f1-4c77-930f-e530c54fc1eb 
tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Terminating instance [ 1196.521334] env[69328]: DEBUG oslo_vmware.api [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]521a07f4-6e48-be8a-745e-df0dca977de7, 'name': SearchDatastore_Task, 'duration_secs': 0.010491} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.521334] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1196.521586] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1196.521800] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1196.521994] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1196.522215] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1196.522482] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b59db773-0355-4b6b-a3a4-01abf39cd67f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.531084] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1196.531242] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1196.531958] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8153e55-a37a-47db-b82b-85349f37a4d9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.537019] env[69328]: DEBUG oslo_vmware.api [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1196.537019] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52185090-b36e-fd12-d955-b7542bdac4eb" [ 1196.537019] env[69328]: _type = "Task" [ 1196.537019] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.544690] env[69328]: DEBUG oslo_vmware.api [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52185090-b36e-fd12-d955-b7542bdac4eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.622082] env[69328]: DEBUG oslo_vmware.api [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274291, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15185} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.622782] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f42c2022-4f43-4464-98eb-24bfe7b47191 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.727s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1196.624749] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1196.624939] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1196.625165] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1196.647861] env[69328]: INFO nova.scheduler.client.report [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Deleted allocations for instance ae46c18e-15ae-4a47-b05a-a143f10b5ab6 [ 1196.651128] 
env[69328]: INFO nova.scheduler.client.report [None req-f42c2022-4f43-4464-98eb-24bfe7b47191 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Deleted allocations for instance 0c83f194-9346-4e24-a0ea-815d0b454ded [ 1196.875800] env[69328]: INFO nova.compute.manager [-] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Took 1.36 seconds to deallocate network for instance. [ 1196.939580] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d6b790fd-2918-4359-be8c-3ef05ca97c74 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "ff815ffb-3422-469e-9b54-b33502826513" "released" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: held 3.276s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1196.975108] env[69328]: DEBUG nova.compute.manager [None req-8db9866c-31f1-4c77-930f-e530c54fc1eb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1196.975108] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8db9866c-31f1-4c77-930f-e530c54fc1eb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1196.976047] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4df657c7-64a5-484d-9339-305eb97cbb71 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.988865] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8db9866c-31f1-4c77-930f-e530c54fc1eb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1196.989212] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4399ed3d-9d3d-4fb4-98b0-bb0b575039fb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.996282] env[69328]: DEBUG oslo_vmware.api [None req-8db9866c-31f1-4c77-930f-e530c54fc1eb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 1196.996282] env[69328]: value = "task-3274292" [ 1196.996282] env[69328]: _type = "Task" [ 1196.996282] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.006080] env[69328]: DEBUG oslo_vmware.api [None req-8db9866c-31f1-4c77-930f-e530c54fc1eb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274292, 'name': PowerOffVM_Task} progress is 0%.
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.046638] env[69328]: DEBUG oslo_vmware.api [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52185090-b36e-fd12-d955-b7542bdac4eb, 'name': SearchDatastore_Task, 'duration_secs': 0.011174} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.047524] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fbb17f71-7eea-41ae-9985-a88c92dfb8fa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.052849] env[69328]: DEBUG oslo_vmware.api [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1197.052849] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]526bddea-8329-28db-ea8c-ae0fb8cbb451" [ 1197.052849] env[69328]: _type = "Task" [ 1197.052849] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.062918] env[69328]: DEBUG oslo_vmware.api [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]526bddea-8329-28db-ea8c-ae0fb8cbb451, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.153578] env[69328]: DEBUG oslo_concurrency.lockutils [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1197.153912] env[69328]: DEBUG oslo_concurrency.lockutils [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1197.154217] env[69328]: DEBUG nova.objects.instance [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lazy-loading 'resources' on Instance uuid ae46c18e-15ae-4a47-b05a-a143f10b5ab6 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1197.160773] env[69328]: DEBUG oslo_concurrency.lockutils [None req-f42c2022-4f43-4464-98eb-24bfe7b47191 tempest-ServerShowV247Test-74744175 tempest-ServerShowV247Test-74744175-project-member] Lock "0c83f194-9346-4e24-a0ea-815d0b454ded" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 7.184s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1197.191626] env[69328]: DEBUG nova.network.neutron
[req-7c49c80e-1b76-4be8-8112-5e76e661450c req-1a9eda6b-3b23-47f2-84ba-da940eca4977 service nova] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Updated VIF entry in instance network info cache for port 69e73394-845a-4108-8b2f-6b23a000d98c. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1197.192012] env[69328]: DEBUG nova.network.neutron [req-7c49c80e-1b76-4be8-8112-5e76e661450c req-1a9eda6b-3b23-47f2-84ba-da940eca4977 service nova] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Updating instance_info_cache with network_info: [{"id": "69e73394-845a-4108-8b2f-6b23a000d98c", "address": "fa:16:3e:e9:f3:15", "network": {"id": "6d64fb31-1957-4722-a4b3-46b946bfb65a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1232247602-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "278be2f8452946b9ab9c4bce8f9a7557", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5bd281ed-ae39-485f-90ee-4ee27994b5b0", "external-id": "nsx-vlan-transportzone-305", "segmentation_id": 305, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69e73394-84", "ovs_interfaceid": "69e73394-845a-4108-8b2f-6b23a000d98c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1197.382310] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a548a7ab-1d45-4fa7-8ea7-ad0d9144a44d tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1197.507173] env[69328]: DEBUG oslo_vmware.api [None req-8db9866c-31f1-4c77-930f-e530c54fc1eb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274292, 'name': PowerOffVM_Task, 'duration_secs': 0.236874} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.507510] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8db9866c-31f1-4c77-930f-e530c54fc1eb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1197.507685] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8db9866c-31f1-4c77-930f-e530c54fc1eb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1197.507930] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3b4d1242-e306-40fe-ab20-981fd4f3111c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.563180] env[69328]: DEBUG oslo_vmware.api [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]526bddea-8329-28db-ea8c-ae0fb8cbb451, 'name': SearchDatastore_Task, 'duration_secs': 0.011883} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.563435] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1197.563793] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 566c3167-4cf2-4236-812f-dfbf30bbaf6f/566c3167-4cf2-4236-812f-dfbf30bbaf6f.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1197.564110] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-56ed7fe3-c496-442b-a197-4c89128e61bf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.574552] env[69328]: DEBUG oslo_vmware.api [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1197.574552] env[69328]: value = "task-3274294" [ 1197.574552] env[69328]: _type = "Task" [ 1197.574552] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.578569] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8db9866c-31f1-4c77-930f-e530c54fc1eb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1197.578779] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8db9866c-31f1-4c77-930f-e530c54fc1eb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1197.578960] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-8db9866c-31f1-4c77-930f-e530c54fc1eb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Deleting the datastore file [datastore2] de8e6616-0460-4a6e-918c-a27818da96e2 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1197.579516] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6470dd34-3686-49a7-a88d-fc1047c94c6c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.584427] env[69328]: DEBUG oslo_vmware.api [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274294, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.585620] env[69328]: DEBUG oslo_vmware.api [None req-8db9866c-31f1-4c77-930f-e530c54fc1eb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for the task: (returnval){ [ 1197.585620] env[69328]: value = "task-3274295" [ 1197.585620] env[69328]: _type = "Task" [ 1197.585620] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.592629] env[69328]: DEBUG oslo_vmware.api [None req-8db9866c-31f1-4c77-930f-e530c54fc1eb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274295, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.659535] env[69328]: DEBUG nova.objects.instance [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lazy-loading 'numa_topology' on Instance uuid ae46c18e-15ae-4a47-b05a-a143f10b5ab6 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1197.694628] env[69328]: DEBUG oslo_concurrency.lockutils [req-7c49c80e-1b76-4be8-8112-5e76e661450c req-1a9eda6b-3b23-47f2-84ba-da940eca4977 service nova] Releasing lock "refresh_cache-d19f6a2a-3a16-4031-8c20-143ccfd6f5f5" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1197.694913] env[69328]: DEBUG nova.compute.manager [req-7c49c80e-1b76-4be8-8112-5e76e661450c req-1a9eda6b-3b23-47f2-84ba-da940eca4977 service nova] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Received event network-vif-deleted-23a25695-a7ad-41dd-b5a1-29ee8d22538e {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1197.935218] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e126677-de87-4d29-97f1-ee1f3cbe0447 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "ff815ffb-3422-469e-9b54-b33502826513" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1197.935636] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e126677-de87-4d29-97f1-ee1f3cbe0447 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "ff815ffb-3422-469e-9b54-b33502826513" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1197.936033] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e126677-de87-4d29-97f1-ee1f3cbe0447 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "ff815ffb-3422-469e-9b54-b33502826513-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1197.936441] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e126677-de87-4d29-97f1-ee1f3cbe0447 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "ff815ffb-3422-469e-9b54-b33502826513-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1197.936684] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e126677-de87-4d29-97f1-ee1f3cbe0447 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "ff815ffb-3422-469e-9b54-b33502826513-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1197.939167] env[69328]: INFO nova.compute.manager [None
req-7e126677-de87-4d29-97f1-ee1f3cbe0447 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Terminating instance [ 1198.044047] env[69328]: DEBUG nova.compute.manager [req-8cda27ab-4d2a-4fee-9d90-30b401fee439 req-c47bd75b-6bec-4287-a1f3-44a23320d7dc service nova] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Received event network-changed-19978029-822a-48e0-b3c1-9d885b82a5f3 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1198.044271] env[69328]: DEBUG nova.compute.manager [req-8cda27ab-4d2a-4fee-9d90-30b401fee439 req-c47bd75b-6bec-4287-a1f3-44a23320d7dc service nova] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Refreshing instance network info cache due to event network-changed-19978029-822a-48e0-b3c1-9d885b82a5f3. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1198.044680] env[69328]: DEBUG oslo_concurrency.lockutils [req-8cda27ab-4d2a-4fee-9d90-30b401fee439 req-c47bd75b-6bec-4287-a1f3-44a23320d7dc service nova] Acquiring lock "refresh_cache-ae46c18e-15ae-4a47-b05a-a143f10b5ab6" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1198.045151] env[69328]: DEBUG oslo_concurrency.lockutils [req-8cda27ab-4d2a-4fee-9d90-30b401fee439 req-c47bd75b-6bec-4287-a1f3-44a23320d7dc service nova] Acquired lock "refresh_cache-ae46c18e-15ae-4a47-b05a-a143f10b5ab6" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1198.045151] env[69328]: DEBUG nova.network.neutron [req-8cda27ab-4d2a-4fee-9d90-30b401fee439 req-c47bd75b-6bec-4287-a1f3-44a23320d7dc service nova] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Refreshing network info cache for port 19978029-822a-48e0-b3c1-9d885b82a5f3 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1198.084992] env[69328]: DEBUG oslo_vmware.api [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274294, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.444784} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.085260] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 566c3167-4cf2-4236-812f-dfbf30bbaf6f/566c3167-4cf2-4236-812f-dfbf30bbaf6f.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1198.085477] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1198.085722] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-173bbc53-cab4-4e97-bfd6-c43c98876893 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.095475] env[69328]: DEBUG oslo_vmware.api [None req-8db9866c-31f1-4c77-930f-e530c54fc1eb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Task: {'id': task-3274295, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.206081} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.096510] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-8db9866c-31f1-4c77-930f-e530c54fc1eb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1198.096705] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8db9866c-31f1-4c77-930f-e530c54fc1eb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1198.096883] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-8db9866c-31f1-4c77-930f-e530c54fc1eb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1198.097073] env[69328]: INFO nova.compute.manager [None req-8db9866c-31f1-4c77-930f-e530c54fc1eb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1198.097307] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8db9866c-31f1-4c77-930f-e530c54fc1eb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1198.097548] env[69328]: DEBUG oslo_vmware.api [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1198.097548] env[69328]: value = "task-3274296" [ 1198.097548] env[69328]: _type = "Task" [ 1198.097548] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.097726] env[69328]: DEBUG nova.compute.manager [-] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1198.097821] env[69328]: DEBUG nova.network.neutron [-] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1198.106354] env[69328]: DEBUG oslo_vmware.api [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274296, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.161127] env[69328]: DEBUG nova.objects.base [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=69328) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1198.268147] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-205953c7-e75d-4cfc-a4d1-3477b1f5a820 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.276282] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf8dc751-0cdb-4d72-b1a0-e13e15cb7563 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.308975] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8e88218-7c92-4d3a-b4b9-72486521aff7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.319202] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1de59fd-d0ae-48a4-8353-0d61ae584969 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.333424] env[69328]: DEBUG nova.compute.provider_tree [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1198.443284] env[69328]: DEBUG nova.compute.manager [None req-7e126677-de87-4d29-97f1-ee1f3cbe0447 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1198.443514] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7e126677-de87-4d29-97f1-ee1f3cbe0447 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1198.446462] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e2d67bb-45b5-43d3-b907-5beadbfadbb2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.453826] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e126677-de87-4d29-97f1-ee1f3cbe0447 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1198.454069] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c6f3358e-808a-4fdc-8de6-6340f72f95c3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.460966] env[69328]: DEBUG oslo_vmware.api [None req-7e126677-de87-4d29-97f1-ee1f3cbe0447 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 1198.460966] env[69328]: value = "task-3274297" [ 1198.460966] env[69328]: _type = "Task" [ 1198.460966] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.469381] env[69328]: DEBUG oslo_vmware.api [None req-7e126677-de87-4d29-97f1-ee1f3cbe0447 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274297, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.608837] env[69328]: DEBUG oslo_vmware.api [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274296, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.095504} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.609140] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1198.609870] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fa3feed-9649-4719-9952-7ce87cef8b9c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.634253] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] 566c3167-4cf2-4236-812f-dfbf30bbaf6f/566c3167-4cf2-4236-812f-dfbf30bbaf6f.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1198.634561] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-445b7857-2589-49ef-bc33-9497a9af6610 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.658397] env[69328]: DEBUG oslo_vmware.api [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1198.658397] env[69328]: value = "task-3274298" [ 1198.658397] env[69328]: _type = "Task" [ 1198.658397] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.667951] env[69328]: DEBUG oslo_vmware.api [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274298, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.781518] env[69328]: DEBUG nova.network.neutron [req-8cda27ab-4d2a-4fee-9d90-30b401fee439 req-c47bd75b-6bec-4287-a1f3-44a23320d7dc service nova] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Updated VIF entry in instance network info cache for port 19978029-822a-48e0-b3c1-9d885b82a5f3. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1198.782023] env[69328]: DEBUG nova.network.neutron [req-8cda27ab-4d2a-4fee-9d90-30b401fee439 req-c47bd75b-6bec-4287-a1f3-44a23320d7dc service nova] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Updating instance_info_cache with network_info: [{"id": "19978029-822a-48e0-b3c1-9d885b82a5f3", "address": "fa:16:3e:99:f9:c3", "network": {"id": "3be6b159-5d5c-4752-b79d-0ed6110569d0", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-915151552-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.195", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f50ac50ef6ae4abc83a8064746de7029", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap19978029-82", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1198.838063] env[69328]: DEBUG nova.scheduler.client.report [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1198.841731] env[69328]: DEBUG nova.network.neutron [-] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1198.970874] env[69328]: DEBUG oslo_vmware.api [None req-7e126677-de87-4d29-97f1-ee1f3cbe0447 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274297, 'name': PowerOffVM_Task, 'duration_secs': 0.357531} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.971163] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e126677-de87-4d29-97f1-ee1f3cbe0447 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1198.971337] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7e126677-de87-4d29-97f1-ee1f3cbe0447 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1198.971592] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4db911a3-0a9b-4a73-b05e-d38004e827ad {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.064732] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7e126677-de87-4d29-97f1-ee1f3cbe0447 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1199.064946] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7e126677-de87-4d29-97f1-ee1f3cbe0447 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1199.065230] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e126677-de87-4d29-97f1-ee1f3cbe0447 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Deleting the datastore file [datastore2] ff815ffb-3422-469e-9b54-b33502826513 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1199.065516] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4fd8f8bf-15a3-42ff-8cff-134e9143fd52 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.071944] env[69328]: DEBUG oslo_vmware.api [None req-7e126677-de87-4d29-97f1-ee1f3cbe0447 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for the task: (returnval){ [ 1199.071944] env[69328]: value = "task-3274300" [ 1199.071944] env[69328]: _type = "Task" [ 1199.071944] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.080680] env[69328]: DEBUG oslo_vmware.api [None req-7e126677-de87-4d29-97f1-ee1f3cbe0447 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274300, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.168208] env[69328]: DEBUG oslo_vmware.api [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274298, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.285539] env[69328]: DEBUG oslo_concurrency.lockutils [req-8cda27ab-4d2a-4fee-9d90-30b401fee439 req-c47bd75b-6bec-4287-a1f3-44a23320d7dc service nova] Releasing lock "refresh_cache-ae46c18e-15ae-4a47-b05a-a143f10b5ab6" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1199.347806] env[69328]: DEBUG oslo_concurrency.lockutils [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.193s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1199.349705] env[69328]: INFO nova.compute.manager [-] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Took 1.25 seconds to deallocate network for instance. [ 1199.350240] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a548a7ab-1d45-4fa7-8ea7-ad0d9144a44d tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.968s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1199.350463] env[69328]: DEBUG nova.objects.instance [None req-a548a7ab-1d45-4fa7-8ea7-ad0d9144a44d tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lazy-loading 'resources' on Instance uuid c1829dcf-3608-4955-bd50-eb9ee27d38e1 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1199.583400] env[69328]: DEBUG oslo_vmware.api [None req-7e126677-de87-4d29-97f1-ee1f3cbe0447 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Task: {'id': task-3274300, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.403806} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.583400] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e126677-de87-4d29-97f1-ee1f3cbe0447 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1199.583655] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7e126677-de87-4d29-97f1-ee1f3cbe0447 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1199.583655] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7e126677-de87-4d29-97f1-ee1f3cbe0447 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1199.583780] env[69328]: INFO nova.compute.manager [None req-7e126677-de87-4d29-97f1-ee1f3cbe0447 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] [instance: ff815ffb-3422-469e-9b54-b33502826513] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1199.584015] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7e126677-de87-4d29-97f1-ee1f3cbe0447 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1199.584236] env[69328]: DEBUG nova.compute.manager [-] [instance: ff815ffb-3422-469e-9b54-b33502826513] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1199.584315] env[69328]: DEBUG nova.network.neutron [-] [instance: ff815ffb-3422-469e-9b54-b33502826513] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1199.669866] env[69328]: DEBUG oslo_vmware.api [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274298, 'name': ReconfigVM_Task} progress is 99%.
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.847645] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquiring lock "ae46c18e-15ae-4a47-b05a-a143f10b5ab6" by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1199.856873] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8db9866c-31f1-4c77-930f-e530c54fc1eb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1199.858257] env[69328]: DEBUG oslo_concurrency.lockutils [None req-09676c55-adeb-4e51-a441-45a3c17a188b tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lock "ae46c18e-15ae-4a47-b05a-a143f10b5ab6" "released" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: held 21.226s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1199.859313] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lock "ae46c18e-15ae-4a47-b05a-a143f10b5ab6" acquired by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" :: waited 0.012s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1199.859550] env[69328]: INFO nova.compute.manager [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Unshelving [ 1199.969168] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3fd0f1a-d8c9-4a8d-80db-b1533590aa4d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.976976] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84f48554-51cb-439d-8b4c-b9973ebbeded {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.007523] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7e22f76-7b84-4996-96f9-c5b0aacb81bc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.015897] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f61e776-a42c-499b-bfc4-56a9c298de5d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.029633] env[69328]: DEBUG nova.compute.provider_tree [None req-a548a7ab-1d45-4fa7-8ea7-ad0d9144a44d tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Inventory has not changed in ProviderTree for provider:
149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1200.077121] env[69328]: DEBUG nova.compute.manager [req-2df1750c-9664-48e5-9ed3-ecf4447f616d req-dcca239e-b797-41fa-8d61-c34fb0abcdf4 service nova] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Received event network-vif-deleted-13436ecc-0cb3-4c13-bf18-f81195196ffd {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1200.077350] env[69328]: DEBUG nova.compute.manager [req-2df1750c-9664-48e5-9ed3-ecf4447f616d req-dcca239e-b797-41fa-8d61-c34fb0abcdf4 service nova] [instance: ff815ffb-3422-469e-9b54-b33502826513] Received event network-vif-deleted-ecf2d696-3969-4c5e-ac8c-0578b4981440 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1200.077877] env[69328]: INFO nova.compute.manager [req-2df1750c-9664-48e5-9ed3-ecf4447f616d req-dcca239e-b797-41fa-8d61-c34fb0abcdf4 service nova] [instance: ff815ffb-3422-469e-9b54-b33502826513] Neutron deleted interface ecf2d696-3969-4c5e-ac8c-0578b4981440; detaching it from the instance and deleting it from the info cache [ 1200.078081] env[69328]: DEBUG nova.network.neutron [req-2df1750c-9664-48e5-9ed3-ecf4447f616d req-dcca239e-b797-41fa-8d61-c34fb0abcdf4 service nova] [instance: ff815ffb-3422-469e-9b54-b33502826513] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1200.170263] env[69328]: DEBUG oslo_vmware.api [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274298, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.533110] env[69328]: DEBUG nova.scheduler.client.report [None req-a548a7ab-1d45-4fa7-8ea7-ad0d9144a44d tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1200.536191] env[69328]: DEBUG nova.network.neutron [-] [instance: ff815ffb-3422-469e-9b54-b33502826513] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1200.581026] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1d59d2d2-a8b9-48ef-b02c-e3ab2384d952 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.590854] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9140199b-fbba-406a-84ca-78934e20688c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.620026] env[69328]: DEBUG nova.compute.manager [req-2df1750c-9664-48e5-9ed3-ecf4447f616d req-dcca239e-b797-41fa-8d61-c34fb0abcdf4 
service nova] [instance: ff815ffb-3422-469e-9b54-b33502826513] Detach interface failed, port_id=ecf2d696-3969-4c5e-ac8c-0578b4981440, reason: Instance ff815ffb-3422-469e-9b54-b33502826513 could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1200.671430] env[69328]: DEBUG oslo_vmware.api [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274298, 'name': ReconfigVM_Task, 'duration_secs': 2.01021} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.671751] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Reconfigured VM instance instance-00000079 to attach disk [datastore1] 566c3167-4cf2-4236-812f-dfbf30bbaf6f/566c3167-4cf2-4236-812f-dfbf30bbaf6f.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1200.672370] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-37124382-b804-4e6e-a4f5-561b0bc3c5f3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.679404] env[69328]: DEBUG oslo_vmware.api [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1200.679404] env[69328]: value = "task-3274301" [ 1200.679404] env[69328]: _type = "Task" [ 1200.679404] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.687621] env[69328]: DEBUG oslo_vmware.api [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274301, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.884271] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1201.038591] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a548a7ab-1d45-4fa7-8ea7-ad0d9144a44d tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.688s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1201.040934] env[69328]: INFO nova.compute.manager [-] [instance: ff815ffb-3422-469e-9b54-b33502826513] Took 1.46 seconds to deallocate network for instance. 
[ 1201.041248] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8db9866c-31f1-4c77-930f-e530c54fc1eb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.184s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1201.041471] env[69328]: DEBUG nova.objects.instance [None req-8db9866c-31f1-4c77-930f-e530c54fc1eb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lazy-loading 'resources' on Instance uuid de8e6616-0460-4a6e-918c-a27818da96e2 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1201.058182] env[69328]: INFO nova.scheduler.client.report [None req-a548a7ab-1d45-4fa7-8ea7-ad0d9144a44d tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Deleted allocations for instance c1829dcf-3608-4955-bd50-eb9ee27d38e1 [ 1201.190130] env[69328]: DEBUG oslo_vmware.api [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274301, 'name': Rename_Task, 'duration_secs': 0.148335} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.190443] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1201.190706] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-12a0091c-9b08-49cb-bed9-02984a17cfba {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.198584] env[69328]: DEBUG oslo_vmware.api [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1201.198584] env[69328]: value = "task-3274302" [ 1201.198584] env[69328]: _type = "Task" [ 1201.198584] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.205783] env[69328]: DEBUG oslo_vmware.api [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274302, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.550517] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e126677-de87-4d29-97f1-ee1f3cbe0447 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1201.568243] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a548a7ab-1d45-4fa7-8ea7-ad0d9144a44d tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "c1829dcf-3608-4955-bd50-eb9ee27d38e1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.695s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1201.644232] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9044307b-9e54-4697-ac87-9f544a28fb2c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.659078] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-883fd859-e653-4de9-ad44-9b51614b2f9b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.689691] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-852deac5-fb0e-4849-b08c-d45bcf481a24 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.696758] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b30811-2b7a-4a67-af26-61f32e2021d8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.719788] env[69328]: DEBUG nova.compute.provider_tree [None req-8db9866c-31f1-4c77-930f-e530c54fc1eb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1201.721399] env[69328]: DEBUG oslo_vmware.api [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274302, 'name': PowerOnVM_Task, 'duration_secs': 0.452091} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.721951] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1201.722182] env[69328]: INFO nova.compute.manager [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Took 8.36 seconds to spawn the instance on the hypervisor. 
[ 1201.722455] env[69328]: DEBUG nova.compute.manager [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1201.723499] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88ce092b-fdb1-49e0-9bed-119ff22994cf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.223279] env[69328]: DEBUG nova.scheduler.client.report [None req-8db9866c-31f1-4c77-930f-e530c54fc1eb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1202.238991] env[69328]: INFO nova.compute.manager [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Took 13.22 seconds to build instance. [ 1202.731736] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8db9866c-31f1-4c77-930f-e530c54fc1eb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.690s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1202.734263] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.850s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1202.734510] env[69328]: DEBUG nova.objects.instance [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lazy-loading 'pci_requests' on Instance uuid ae46c18e-15ae-4a47-b05a-a143f10b5ab6 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1202.742062] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a1d0cdf3-2901-41e1-99e9-117cce4015de tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "566c3167-4cf2-4236-812f-dfbf30bbaf6f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.738s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1202.755160] env[69328]: INFO nova.scheduler.client.report [None req-8db9866c-31f1-4c77-930f-e530c54fc1eb tempest-AttachInterfacesTestJSON-1279775803 
tempest-AttachInterfacesTestJSON-1279775803-project-member] Deleted allocations for instance de8e6616-0460-4a6e-918c-a27818da96e2 [ 1203.068452] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1203.068700] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1203.068863] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1203.069067] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1203.069642] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1203.069855] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1203.070024] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._sync_power_states {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1203.238969] env[69328]: DEBUG nova.objects.instance [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lazy-loading 'numa_topology' on Instance uuid ae46c18e-15ae-4a47-b05a-a143f10b5ab6 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1203.263049] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8db9866c-31f1-4c77-930f-e530c54fc1eb tempest-AttachInterfacesTestJSON-1279775803 tempest-AttachInterfacesTestJSON-1279775803-project-member] Lock "de8e6616-0460-4a6e-918c-a27818da96e2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.795s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1203.574327] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Getting list of instances from cluster (obj){ [ 1203.574327] env[69328]: value = "domain-c8" [ 1203.574327] env[69328]: _type = "ClusterComputeResource" [ 1203.574327] env[69328]: } {{(pid=69328) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1203.575490] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-e3ad1814-4105-4a06-8bc3-8894b2883e89 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.581868] env[69328]: DEBUG nova.compute.manager [req-b7f87d71-941f-472f-bedd-907542f6a3f2 req-82e56128-b087-41be-ae64-43624b1f9563 service nova] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Received event network-changed-119833be-9532-4d57-aece-6b3a83d11e9f {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1203.582072] env[69328]: DEBUG nova.compute.manager [req-b7f87d71-941f-472f-bedd-907542f6a3f2 req-82e56128-b087-41be-ae64-43624b1f9563 service nova] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Refreshing instance network info cache due to event network-changed-119833be-9532-4d57-aece-6b3a83d11e9f. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1203.582366] env[69328]: DEBUG oslo_concurrency.lockutils [req-b7f87d71-941f-472f-bedd-907542f6a3f2 req-82e56128-b087-41be-ae64-43624b1f9563 service nova] Acquiring lock "refresh_cache-566c3167-4cf2-4236-812f-dfbf30bbaf6f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1203.582585] env[69328]: DEBUG oslo_concurrency.lockutils [req-b7f87d71-941f-472f-bedd-907542f6a3f2 req-82e56128-b087-41be-ae64-43624b1f9563 service nova] Acquired lock "refresh_cache-566c3167-4cf2-4236-812f-dfbf30bbaf6f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1203.582585] env[69328]: DEBUG nova.network.neutron [req-b7f87d71-941f-472f-bedd-907542f6a3f2 req-82e56128-b087-41be-ae64-43624b1f9563 service nova] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Refreshing network info cache for port 119833be-9532-4d57-aece-6b3a83d11e9f {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1203.594395] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Got total of 4 instances {{(pid=69328) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1203.594548] env[69328]: WARNING nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] While synchronizing instance power states, found 5 instances in the database and 4 instances on the hypervisor. 
[ 1203.594683] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Triggering sync for uuid b0a1441c-81e2-4131-a2ff-f5042d559d9f {{(pid=69328) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1203.594857] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Triggering sync for uuid ff815ffb-3422-469e-9b54-b33502826513 {{(pid=69328) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1203.595035] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Triggering sync for uuid 0cf68559-5f07-4006-9f7f-59027e31635d {{(pid=69328) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1203.595197] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Triggering sync for uuid d19f6a2a-3a16-4031-8c20-143ccfd6f5f5 {{(pid=69328) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1203.595357] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Triggering sync for uuid 566c3167-4cf2-4236-812f-dfbf30bbaf6f {{(pid=69328) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1203.596150] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Acquiring lock "b0a1441c-81e2-4131-a2ff-f5042d559d9f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1203.596393] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "b0a1441c-81e2-4131-a2ff-f5042d559d9f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1203.596672] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Acquiring lock "ff815ffb-3422-469e-9b54-b33502826513" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1203.596881] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Acquiring lock "0cf68559-5f07-4006-9f7f-59027e31635d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1203.597107] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "0cf68559-5f07-4006-9f7f-59027e31635d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1203.597295] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Acquiring lock "d19f6a2a-3a16-4031-8c20-143ccfd6f5f5" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69328) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1203.597479] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "d19f6a2a-3a16-4031-8c20-143ccfd6f5f5" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1203.598291] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Acquiring lock "566c3167-4cf2-4236-812f-dfbf30bbaf6f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1203.598291] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "566c3167-4cf2-4236-812f-dfbf30bbaf6f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1203.598291] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1203.598291] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69328) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1203.598863] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-692f23e7-1a25-47ed-b340-7042be86dee6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.602102] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba1000d1-bfe1-4aab-b8fe-46d8b20f8f0a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.604773] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c79bf490-b0c5-40e1-a0cc-4c96392c879d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.607494] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9531f518-886e-468d-a752-81b75872edf0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.609645] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager.update_available_resource {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1203.741444] env[69328]: INFO nova.compute.claims [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1204.112097] env[69328]: 
DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1204.126527] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "b0a1441c-81e2-4131-a2ff-f5042d559d9f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.530s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1204.126970] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "d19f6a2a-3a16-4031-8c20-143ccfd6f5f5" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.529s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1204.127293] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "566c3167-4cf2-4236-812f-dfbf30bbaf6f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.529s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1204.127583] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "0cf68559-5f07-4006-9f7f-59027e31635d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.530s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1204.367911] env[69328]: DEBUG nova.network.neutron [req-b7f87d71-941f-472f-bedd-907542f6a3f2 req-82e56128-b087-41be-ae64-43624b1f9563 service nova] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Updated VIF entry in instance network info cache for port 119833be-9532-4d57-aece-6b3a83d11e9f. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1204.368335] env[69328]: DEBUG nova.network.neutron [req-b7f87d71-941f-472f-bedd-907542f6a3f2 req-82e56128-b087-41be-ae64-43624b1f9563 service nova] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Updating instance_info_cache with network_info: [{"id": "119833be-9532-4d57-aece-6b3a83d11e9f", "address": "fa:16:3e:05:b2:63", "network": {"id": "4031def8-0553-461f-9528-aa338b9d7b2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1834835647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f357b5a9494b4849a83aa934c5d4e26b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "357d2811-e990-4985-9f9e-b158d10d3699", "external-id": "nsx-vlan-transportzone-641", "segmentation_id": 641, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap119833be-95", "ovs_interfaceid": "119833be-9532-4d57-aece-6b3a83d11e9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1204.862723] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca334c61-47e1-462a-ae1e-939d2b4f8a3f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.871343] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfe75fbc-e6f6-48ec-b9de-62864f41e376 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.874584] env[69328]: DEBUG oslo_concurrency.lockutils [req-b7f87d71-941f-472f-bedd-907542f6a3f2 req-82e56128-b087-41be-ae64-43624b1f9563 service nova] Releasing lock "refresh_cache-566c3167-4cf2-4236-812f-dfbf30bbaf6f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1204.903839] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a34fb57-6091-4033-ad7f-21cfac260f6d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.911391] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-682eb6e0-4579-4943-ad7b-cc08372adb65 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.924500] env[69328]: DEBUG nova.compute.provider_tree [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1205.427638] env[69328]: DEBUG 
nova.scheduler.client.report [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1205.820776] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d5466a87-70da-441a-bcd8-376ab04f5268 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "b0a1441c-81e2-4131-a2ff-f5042d559d9f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1205.821071] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d5466a87-70da-441a-bcd8-376ab04f5268 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "b0a1441c-81e2-4131-a2ff-f5042d559d9f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1205.821306] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d5466a87-70da-441a-bcd8-376ab04f5268 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "b0a1441c-81e2-4131-a2ff-f5042d559d9f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1205.821489] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d5466a87-70da-441a-bcd8-376ab04f5268 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "b0a1441c-81e2-4131-a2ff-f5042d559d9f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1205.821725] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d5466a87-70da-441a-bcd8-376ab04f5268 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "b0a1441c-81e2-4131-a2ff-f5042d559d9f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1205.826351] env[69328]: INFO nova.compute.manager [None req-d5466a87-70da-441a-bcd8-376ab04f5268 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Terminating instance [ 1205.935221] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.201s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1205.937598] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e126677-de87-4d29-97f1-ee1f3cbe0447 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.387s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1205.937834] env[69328]: DEBUG nova.objects.instance [None req-7e126677-de87-4d29-97f1-ee1f3cbe0447 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lazy-loading 'resources' on Instance uuid ff815ffb-3422-469e-9b54-b33502826513 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1205.966603] env[69328]: INFO nova.network.neutron [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Updating port 19978029-822a-48e0-b3c1-9d885b82a5f3 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1206.330600] env[69328]: DEBUG nova.compute.manager [None req-d5466a87-70da-441a-bcd8-376ab04f5268 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1206.330600] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d5466a87-70da-441a-bcd8-376ab04f5268 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1206.331387] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9d69d70-1841-49b1-b100-d839a913ce57 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.339360] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5466a87-70da-441a-bcd8-376ab04f5268 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1206.339626] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5f46c80e-d760-45ab-9987-a5531fb7f0d7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.346047] env[69328]: DEBUG oslo_vmware.api [None req-d5466a87-70da-441a-bcd8-376ab04f5268 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 1206.346047] env[69328]: value = "task-3274304" [ 1206.346047] env[69328]: _type = "Task" [ 1206.346047] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.353012] env[69328]: DEBUG oslo_vmware.api [None req-d5466a87-70da-441a-bcd8-376ab04f5268 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274304, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.554917] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdaa81fa-56c5-49ae-8399-5d8870299f0b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.562759] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-631a9ae9-345e-40fe-be84-9c959d409ea5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.595425] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f50f2ec5-77d9-4aee-9563-4930409b1d54 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.602619] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fd68b60-179d-4164-a12b-d6afc5bdbab2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.615795] env[69328]: DEBUG nova.compute.provider_tree [None req-7e126677-de87-4d29-97f1-ee1f3cbe0447 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1206.855729] env[69328]: DEBUG oslo_vmware.api [None req-d5466a87-70da-441a-bcd8-376ab04f5268 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274304, 'name': PowerOffVM_Task, 'duration_secs': 0.224178} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.856009] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5466a87-70da-441a-bcd8-376ab04f5268 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1206.856197] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d5466a87-70da-441a-bcd8-376ab04f5268 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1206.856441] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b2a66375-57dd-46c6-8041-1d61a2882878 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.119929] env[69328]: DEBUG nova.scheduler.client.report [None req-7e126677-de87-4d29-97f1-ee1f3cbe0447 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1207.245501] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d5466a87-70da-441a-bcd8-376ab04f5268 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1207.246034] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d5466a87-70da-441a-bcd8-376ab04f5268 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1207.246314] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5466a87-70da-441a-bcd8-376ab04f5268 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Deleting the datastore file [datastore1] b0a1441c-81e2-4131-a2ff-f5042d559d9f {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1207.246599] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-885aee24-02bb-4731-ac2f-1136d2ba3fd4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.258944] env[69328]: DEBUG oslo_vmware.api [None req-d5466a87-70da-441a-bcd8-376ab04f5268 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for the task: (returnval){ [ 1207.258944] env[69328]: value = "task-3274306" [ 1207.258944] env[69328]: _type = "Task" 
[ 1207.258944] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.273595] env[69328]: DEBUG oslo_vmware.api [None req-d5466a87-70da-441a-bcd8-376ab04f5268 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274306, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.364467] env[69328]: DEBUG nova.compute.manager [req-71bcd633-59e0-4ee0-b9a4-7e39143c01f5 req-db9a0d8a-3211-45b8-b498-3431309390ea service nova] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Received event network-vif-plugged-19978029-822a-48e0-b3c1-9d885b82a5f3 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1207.364791] env[69328]: DEBUG oslo_concurrency.lockutils [req-71bcd633-59e0-4ee0-b9a4-7e39143c01f5 req-db9a0d8a-3211-45b8-b498-3431309390ea service nova] Acquiring lock "ae46c18e-15ae-4a47-b05a-a143f10b5ab6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1207.365970] env[69328]: DEBUG oslo_concurrency.lockutils [req-71bcd633-59e0-4ee0-b9a4-7e39143c01f5 req-db9a0d8a-3211-45b8-b498-3431309390ea service nova] Lock "ae46c18e-15ae-4a47-b05a-a143f10b5ab6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1207.366237] env[69328]: DEBUG oslo_concurrency.lockutils [req-71bcd633-59e0-4ee0-b9a4-7e39143c01f5 req-db9a0d8a-3211-45b8-b498-3431309390ea service nova] Lock "ae46c18e-15ae-4a47-b05a-a143f10b5ab6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1207.366463] env[69328]: DEBUG nova.compute.manager [req-71bcd633-59e0-4ee0-b9a4-7e39143c01f5 req-db9a0d8a-3211-45b8-b498-3431309390ea service nova] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] No waiting events found dispatching network-vif-plugged-19978029-822a-48e0-b3c1-9d885b82a5f3 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1207.366843] env[69328]: WARNING nova.compute.manager [req-71bcd633-59e0-4ee0-b9a4-7e39143c01f5 req-db9a0d8a-3211-45b8-b498-3431309390ea service nova] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Received unexpected event network-vif-plugged-19978029-822a-48e0-b3c1-9d885b82a5f3 for instance with vm_state shelved_offloaded and task_state spawning. 
[ 1207.448440] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquiring lock "refresh_cache-ae46c18e-15ae-4a47-b05a-a143f10b5ab6" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1207.448630] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquired lock "refresh_cache-ae46c18e-15ae-4a47-b05a-a143f10b5ab6" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1207.448810] env[69328]: DEBUG nova.network.neutron [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1207.625291] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e126677-de87-4d29-97f1-ee1f3cbe0447 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.687s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1207.627497] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 3.516s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1207.627685] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1207.627836] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69328) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1207.629016] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2878679b-f47f-4792-a076-7778a61dfde9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.637262] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ce33b58-4c9a-4a02-ae41-ac8beaf8b1e1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.655297] env[69328]: INFO nova.scheduler.client.report [None req-7e126677-de87-4d29-97f1-ee1f3cbe0447 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Deleted allocations for instance ff815ffb-3422-469e-9b54-b33502826513 [ 1207.657147] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-013ff86c-6974-4f24-a029-1c60f5a4dfd8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.670503] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cd45973-30f2-4b5b-8589-f39a2fb6a5cf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.706622] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179007MB free_disk=116GB free_vcpus=48 pci_devices=None {{(pid=69328) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1207.707175] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1207.707175] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1207.769603] env[69328]: DEBUG oslo_vmware.api [None req-d5466a87-70da-441a-bcd8-376ab04f5268 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Task: {'id': task-3274306, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136464} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.769857] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5466a87-70da-441a-bcd8-376ab04f5268 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1207.770057] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d5466a87-70da-441a-bcd8-376ab04f5268 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1207.770243] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d5466a87-70da-441a-bcd8-376ab04f5268 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1207.770416] env[69328]: INFO nova.compute.manager [None req-d5466a87-70da-441a-bcd8-376ab04f5268 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Took 1.44 seconds to destroy the instance on the hypervisor. 
[ 1207.770659] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d5466a87-70da-441a-bcd8-376ab04f5268 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1207.770849] env[69328]: DEBUG nova.compute.manager [-] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1207.770943] env[69328]: DEBUG nova.network.neutron [-] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1208.175299] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7e126677-de87-4d29-97f1-ee1f3cbe0447 tempest-AttachVolumeNegativeTest-1234324410 tempest-AttachVolumeNegativeTest-1234324410-project-member] Lock "ff815ffb-3422-469e-9b54-b33502826513" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.239s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1208.176415] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "ff815ffb-3422-469e-9b54-b33502826513" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 4.580s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1208.176906] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-56299162-d850-45ec-aa6d-ef03f0ac2ac3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.190261] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d55e3c5c-f6d5-4e55-91c7-792b33d1d4cd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.257476] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance b0a1441c-81e2-4131-a2ff-f5042d559d9f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1208.257642] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 0cf68559-5f07-4006-9f7f-59027e31635d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1208.257766] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance d19f6a2a-3a16-4031-8c20-143ccfd6f5f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1208.257884] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 566c3167-4cf2-4236-812f-dfbf30bbaf6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1208.258006] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance ae46c18e-15ae-4a47-b05a-a143f10b5ab6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1208.258211] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=69328) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1208.258354] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=69328) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1208.332336] env[69328]: DEBUG nova.network.neutron [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Updating instance_info_cache with network_info: [{"id": "19978029-822a-48e0-b3c1-9d885b82a5f3", "address": "fa:16:3e:99:f9:c3", "network": {"id": "3be6b159-5d5c-4752-b79d-0ed6110569d0", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-915151552-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.195", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f50ac50ef6ae4abc83a8064746de7029", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19978029-82", "ovs_interfaceid": "19978029-822a-48e0-b3c1-9d885b82a5f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1208.388310] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74666ed0-4e76-4cc4-89a2-e2a3bd4f3f4e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.397584] env[69328]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3030a59-1853-424c-b722-aaf8748db62a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.432353] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17ddb2eb-e0ae-4c47-a502-55ddb94cf118 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.440803] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba5a50fa-4fb9-4d80-877c-14e9f76c8b73 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.455358] env[69328]: DEBUG nova.compute.provider_tree [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1208.485713] env[69328]: DEBUG nova.network.neutron [-] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1208.732824] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "ff815ffb-3422-469e-9b54-b33502826513" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.556s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1208.836240] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Releasing lock "refresh_cache-ae46c18e-15ae-4a47-b05a-a143f10b5ab6" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1208.861064] env[69328]: DEBUG nova.virt.hardware [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='88650e56a07192f2b4b793e8080388cc',container_format='bare',created_at=2025-04-03T17:45:36Z,direct_url=,disk_format='vmdk',id=025a677f-beea-4695-85ea-28c156879ab9,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-602832091-shelved',owner='f50ac50ef6ae4abc83a8064746de7029',properties=ImageMetaProps,protected=,size=31669248,status='active',tags=,updated_at=2025-04-03T17:45:50Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1208.861330] env[69328]: DEBUG nova.virt.hardware [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 
1208.861490] env[69328]: DEBUG nova.virt.hardware [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1208.861672] env[69328]: DEBUG nova.virt.hardware [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1208.861815] env[69328]: DEBUG nova.virt.hardware [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1208.861960] env[69328]: DEBUG nova.virt.hardware [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1208.862188] env[69328]: DEBUG nova.virt.hardware [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1208.862354] env[69328]: DEBUG nova.virt.hardware [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1208.862524] env[69328]: DEBUG nova.virt.hardware [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1208.862689] env[69328]: DEBUG nova.virt.hardware [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1208.862861] env[69328]: DEBUG nova.virt.hardware [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1208.863801] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-683c6985-fd28-469a-8b22-0ef0358593ef {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.872684] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f7fadc05-f691-482a-a045-7f23490a872e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.886437] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:99:f9:c3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fe20ef0e-0991-44d7-887d-08dddac0b56b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '19978029-822a-48e0-b3c1-9d885b82a5f3', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1208.894130] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1208.894407] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1208.894627] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1b7b9b66-66ac-40ae-bd1c-08fe8267e0d4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.914544] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1208.914544] env[69328]: value = "task-3274308" [ 1208.914544] env[69328]: _type = "Task" [ 1208.914544] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.923556] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274308, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.958793] env[69328]: DEBUG nova.scheduler.client.report [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1208.990725] env[69328]: INFO nova.compute.manager [-] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Took 1.22 seconds to deallocate network for instance. 
[ 1209.395320] env[69328]: DEBUG nova.compute.manager [req-cdea63d8-73e4-4a88-a2d7-21052618dc57 req-a26d959a-af7b-449f-a783-e22b8fb4eb91 service nova] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Received event network-changed-19978029-822a-48e0-b3c1-9d885b82a5f3 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1209.395631] env[69328]: DEBUG nova.compute.manager [req-cdea63d8-73e4-4a88-a2d7-21052618dc57 req-a26d959a-af7b-449f-a783-e22b8fb4eb91 service nova] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Refreshing instance network info cache due to event network-changed-19978029-822a-48e0-b3c1-9d885b82a5f3. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1209.395754] env[69328]: DEBUG oslo_concurrency.lockutils [req-cdea63d8-73e4-4a88-a2d7-21052618dc57 req-a26d959a-af7b-449f-a783-e22b8fb4eb91 service nova] Acquiring lock "refresh_cache-ae46c18e-15ae-4a47-b05a-a143f10b5ab6" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1209.395896] env[69328]: DEBUG oslo_concurrency.lockutils [req-cdea63d8-73e4-4a88-a2d7-21052618dc57 req-a26d959a-af7b-449f-a783-e22b8fb4eb91 service nova] Acquired lock "refresh_cache-ae46c18e-15ae-4a47-b05a-a143f10b5ab6" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1209.396114] env[69328]: DEBUG nova.network.neutron [req-cdea63d8-73e4-4a88-a2d7-21052618dc57 req-a26d959a-af7b-449f-a783-e22b8fb4eb91 service nova] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Refreshing network info cache for port 19978029-822a-48e0-b3c1-9d885b82a5f3 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1209.401207] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Acquiring lock "d1820cd5-bacb-4097-8d05-fffea8b64e2b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.401459] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Lock "d1820cd5-bacb-4097-8d05-fffea8b64e2b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1209.424846] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274308, 'name': CreateVM_Task, 'duration_secs': 0.325996} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.425095] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1209.425844] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/025a677f-beea-4695-85ea-28c156879ab9" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1209.426102] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquired lock "[datastore2] devstack-image-cache_base/025a677f-beea-4695-85ea-28c156879ab9" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1209.426561] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/025a677f-beea-4695-85ea-28c156879ab9" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1209.426873] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-696ee81d-c89b-4fad-9d1c-864e8516ff6e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.432940] env[69328]: DEBUG oslo_vmware.api [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 1209.432940] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d7f5b1-f69b-97fb-c4da-012b74004b98" [ 1209.432940] env[69328]: _type = "Task" [ 1209.432940] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.441404] env[69328]: DEBUG oslo_vmware.api [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d7f5b1-f69b-97fb-c4da-012b74004b98, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.463627] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69328) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1209.463831] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.757s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1209.497616] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d5466a87-70da-441a-bcd8-376ab04f5268 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.497912] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d5466a87-70da-441a-bcd8-376ab04f5268 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1209.498182] env[69328]: DEBUG nova.objects.instance [None req-d5466a87-70da-441a-bcd8-376ab04f5268 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lazy-loading 'resources' on Instance uuid b0a1441c-81e2-4131-a2ff-f5042d559d9f {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1209.904305] env[69328]: DEBUG nova.compute.manager [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Starting instance... 
{{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1209.950022] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Releasing lock "[datastore2] devstack-image-cache_base/025a677f-beea-4695-85ea-28c156879ab9" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1209.950022] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Processing image 025a677f-beea-4695-85ea-28c156879ab9 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1209.950022] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/025a677f-beea-4695-85ea-28c156879ab9/025a677f-beea-4695-85ea-28c156879ab9.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1209.950022] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquired lock "[datastore2] devstack-image-cache_base/025a677f-beea-4695-85ea-28c156879ab9/025a677f-beea-4695-85ea-28c156879ab9.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1209.950022] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1209.950022] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d461f53d-3973-4bbc-8fe7-d6e9dd831fa0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.958602] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1209.958964] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1209.959782] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-afcc8b7d-a144-4a00-8866-1fc9019f051d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.964839] env[69328]: DEBUG oslo_vmware.api [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 1209.964839] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52172811-4278-14c3-4334-bc155c56eb07" [ 1209.964839] env[69328]: _type = "Task" [ 1209.964839] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.973146] env[69328]: DEBUG oslo_vmware.api [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52172811-4278-14c3-4334-bc155c56eb07, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.085396] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58db66f4-41ab-4662-82fa-92a784b572fb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.092854] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d05d0ede-a822-48c5-a4c3-9033f0d6f0a7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.124567] env[69328]: DEBUG nova.network.neutron [req-cdea63d8-73e4-4a88-a2d7-21052618dc57 req-a26d959a-af7b-449f-a783-e22b8fb4eb91 service nova] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Updated VIF entry in instance network info cache for port 19978029-822a-48e0-b3c1-9d885b82a5f3. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1210.124894] env[69328]: DEBUG nova.network.neutron [req-cdea63d8-73e4-4a88-a2d7-21052618dc57 req-a26d959a-af7b-449f-a783-e22b8fb4eb91 service nova] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Updating instance_info_cache with network_info: [{"id": "19978029-822a-48e0-b3c1-9d885b82a5f3", "address": "fa:16:3e:99:f9:c3", "network": {"id": "3be6b159-5d5c-4752-b79d-0ed6110569d0", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-915151552-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.195", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f50ac50ef6ae4abc83a8064746de7029", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe20ef0e-0991-44d7-887d-08dddac0b56b", "external-id": "nsx-vlan-transportzone-991", "segmentation_id": 991, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19978029-82", "ovs_interfaceid": "19978029-822a-48e0-b3c1-9d885b82a5f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1210.127011] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0fe4db7-b595-4e35-a1ee-f77d67c55af6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.133984] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf22195f-fdbb-464d-8094-e2b0ac8cf05e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.147958] env[69328]: DEBUG nova.compute.provider_tree [None req-d5466a87-70da-441a-bcd8-376ab04f5268 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1210.423865] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1210.474620] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Preparing fetch location {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1210.474864] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd 
tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Fetch image to [datastore2] OSTACK_IMG_21e4d26b-ca34-411f-bc67-6edb17ef6893/OSTACK_IMG_21e4d26b-ca34-411f-bc67-6edb17ef6893.vmdk {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1210.475086] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Downloading stream optimized image 025a677f-beea-4695-85ea-28c156879ab9 to [datastore2] OSTACK_IMG_21e4d26b-ca34-411f-bc67-6edb17ef6893/OSTACK_IMG_21e4d26b-ca34-411f-bc67-6edb17ef6893.vmdk on the data store datastore2 as vApp {{(pid=69328) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1210.475321] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Downloading image file data 025a677f-beea-4695-85ea-28c156879ab9 to the ESX as VM named 'OSTACK_IMG_21e4d26b-ca34-411f-bc67-6edb17ef6893' {{(pid=69328) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1210.549013] env[69328]: DEBUG oslo_vmware.rw_handles [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1210.549013] env[69328]: value = "resgroup-9" [ 1210.549013] env[69328]: _type = "ResourcePool" [ 1210.549013] env[69328]: }. {{(pid=69328) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1210.549280] env[69328]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-794254d1-153f-4508-bf2a-43cbc8f05bdd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.568496] env[69328]: DEBUG oslo_vmware.rw_handles [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lease: (returnval){ [ 1210.568496] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52404e12-1dc8-f683-36da-843210585bf7" [ 1210.568496] env[69328]: _type = "HttpNfcLease" [ 1210.568496] env[69328]: } obtained for vApp import into resource pool (val){ [ 1210.568496] env[69328]: value = "resgroup-9" [ 1210.568496] env[69328]: _type = "ResourcePool" [ 1210.568496] env[69328]: }. {{(pid=69328) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1210.568732] env[69328]: DEBUG oslo_vmware.api [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the lease: (returnval){ [ 1210.568732] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52404e12-1dc8-f683-36da-843210585bf7" [ 1210.568732] env[69328]: _type = "HttpNfcLease" [ 1210.568732] env[69328]: } to be ready. 
{{(pid=69328) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1210.576985] env[69328]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1210.576985] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52404e12-1dc8-f683-36da-843210585bf7" [ 1210.576985] env[69328]: _type = "HttpNfcLease" [ 1210.576985] env[69328]: } is initializing. {{(pid=69328) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1210.630963] env[69328]: DEBUG oslo_concurrency.lockutils [req-cdea63d8-73e4-4a88-a2d7-21052618dc57 req-a26d959a-af7b-449f-a783-e22b8fb4eb91 service nova] Releasing lock "refresh_cache-ae46c18e-15ae-4a47-b05a-a143f10b5ab6" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1210.631296] env[69328]: DEBUG nova.compute.manager [req-cdea63d8-73e4-4a88-a2d7-21052618dc57 req-a26d959a-af7b-449f-a783-e22b8fb4eb91 service nova] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Received event network-vif-deleted-9e189e9a-ecbf-475e-82a4-508c1a0aec74 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1210.650623] env[69328]: DEBUG nova.scheduler.client.report [None req-d5466a87-70da-441a-bcd8-376ab04f5268 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1211.078089] env[69328]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1211.078089] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52404e12-1dc8-f683-36da-843210585bf7" [ 1211.078089] env[69328]: _type = "HttpNfcLease" [ 1211.078089] env[69328]: } is initializing. 
{{(pid=69328) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1211.156219] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d5466a87-70da-441a-bcd8-376ab04f5268 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.658s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1211.159270] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.735s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1211.160745] env[69328]: INFO nova.compute.claims [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1211.179489] env[69328]: INFO nova.scheduler.client.report [None req-d5466a87-70da-441a-bcd8-376ab04f5268 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Deleted allocations for instance b0a1441c-81e2-4131-a2ff-f5042d559d9f [ 1211.577796] env[69328]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1211.577796] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52404e12-1dc8-f683-36da-843210585bf7" [ 1211.577796] env[69328]: _type = "HttpNfcLease" [ 1211.577796] env[69328]: } is initializing. {{(pid=69328) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1211.686217] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d5466a87-70da-441a-bcd8-376ab04f5268 tempest-ServerActionsTestOtherA-469847991 tempest-ServerActionsTestOtherA-469847991-project-member] Lock "b0a1441c-81e2-4131-a2ff-f5042d559d9f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.865s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1212.079083] env[69328]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1212.079083] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52404e12-1dc8-f683-36da-843210585bf7" [ 1212.079083] env[69328]: _type = "HttpNfcLease" [ 1212.079083] env[69328]: } is ready. {{(pid=69328) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1212.079465] env[69328]: DEBUG oslo_vmware.rw_handles [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1212.079465] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52404e12-1dc8-f683-36da-843210585bf7" [ 1212.079465] env[69328]: _type = "HttpNfcLease" [ 1212.079465] env[69328]: }. 
{{(pid=69328) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1212.080134] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39cd7d31-780e-4cf3-a429-4fc4a3e9e688 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.087755] env[69328]: DEBUG oslo_vmware.rw_handles [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d44285-df55-fd78-5627-40eb2ef41dd2/disk-0.vmdk from lease info. {{(pid=69328) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1212.087933] env[69328]: DEBUG oslo_vmware.rw_handles [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Creating HTTP connection to write to file with size = 31669248 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d44285-df55-fd78-5627-40eb2ef41dd2/disk-0.vmdk. {{(pid=69328) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1212.151737] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d2fd7a2b-0cf8-418c-a0ed-97ee65c160a0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.241894] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58b6b1ad-c926-4c71-ac62-4ec488bb9937 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.250248] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3635a8d-2756-426b-be38-8ecbc0be1036 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.281628] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90b1187a-f017-4544-b13c-08b6f79fa8ee {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.289368] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ab82a76-3639-46c0-9810-114bf7368f4c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.302520] env[69328]: DEBUG nova.compute.provider_tree [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1212.807454] env[69328]: DEBUG nova.scheduler.client.report [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 
196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1213.259896] env[69328]: DEBUG oslo_vmware.rw_handles [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Completed reading data from the image iterator. {{(pid=69328) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1213.260240] env[69328]: DEBUG oslo_vmware.rw_handles [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d44285-df55-fd78-5627-40eb2ef41dd2/disk-0.vmdk. {{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1213.261415] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c73dd09-ae85-46c1-80de-807bf913c244 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.269743] env[69328]: DEBUG oslo_vmware.rw_handles [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d44285-df55-fd78-5627-40eb2ef41dd2/disk-0.vmdk is in state: ready. {{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1213.269885] env[69328]: DEBUG oslo_vmware.rw_handles [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d44285-df55-fd78-5627-40eb2ef41dd2/disk-0.vmdk. {{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1213.270067] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-b0c9cd4b-d317-4ede-8e40-b65ee6315e8f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.316138] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.157s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1213.316903] env[69328]: DEBUG nova.compute.manager [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1213.516822] env[69328]: DEBUG oslo_vmware.rw_handles [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d44285-df55-fd78-5627-40eb2ef41dd2/disk-0.vmdk. {{(pid=69328) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1213.517074] env[69328]: INFO nova.virt.vmwareapi.images [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Downloaded image file data 025a677f-beea-4695-85ea-28c156879ab9 [ 1213.517930] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-817d7770-6699-4308-bd73-b6cb6f6c643e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.534418] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ac8f89dc-8755-437b-ae4e-137616beed94 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.566827] env[69328]: INFO nova.virt.vmwareapi.images [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] The imported VM was unregistered [ 1213.569451] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Caching image {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1213.569690] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Creating directory with path [datastore2] devstack-image-cache_base/025a677f-beea-4695-85ea-28c156879ab9 {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1213.569971] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-57406004-c4e5-40cc-a3ea-92b3ef02a7c6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.583535] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Created directory with path [datastore2] devstack-image-cache_base/025a677f-beea-4695-85ea-28c156879ab9 {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1213.584783] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_21e4d26b-ca34-411f-bc67-6edb17ef6893/OSTACK_IMG_21e4d26b-ca34-411f-bc67-6edb17ef6893.vmdk to [datastore2] 
devstack-image-cache_base/025a677f-beea-4695-85ea-28c156879ab9/025a677f-beea-4695-85ea-28c156879ab9.vmdk. {{(pid=69328) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1213.584783] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-bcb858b8-1ec7-412c-819f-45fc4a858241 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.591031] env[69328]: DEBUG oslo_vmware.api [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 1213.591031] env[69328]: value = "task-3274312" [ 1213.591031] env[69328]: _type = "Task" [ 1213.591031] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.599359] env[69328]: DEBUG oslo_vmware.api [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274312, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.822022] env[69328]: DEBUG nova.compute.utils [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1213.823660] env[69328]: DEBUG nova.compute.manager [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1213.824377] env[69328]: DEBUG nova.network.neutron [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1213.883790] env[69328]: DEBUG nova.policy [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '26d11fab73a54bae8e1ac54e2d19cebe', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b02169b335c44970bc9248506de3f2a8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 1214.104091] env[69328]: DEBUG oslo_vmware.api [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274312, 'name': MoveVirtualDisk_Task} progress is 12%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.184080] env[69328]: DEBUG nova.network.neutron [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Successfully created port: c5384f33-ee5c-4e98-b375-1ff0f83ac920 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1214.326871] env[69328]: DEBUG nova.compute.manager [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1214.604353] env[69328]: DEBUG oslo_vmware.api [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274312, 'name': MoveVirtualDisk_Task} progress is 32%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.106223] env[69328]: DEBUG oslo_vmware.api [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274312, 'name': MoveVirtualDisk_Task} progress is 52%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.339374] env[69328]: DEBUG nova.compute.manager [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1215.368133] env[69328]: DEBUG nova.virt.hardware [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1215.368446] env[69328]: DEBUG nova.virt.hardware [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1215.368622] env[69328]: DEBUG nova.virt.hardware [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1215.368810] env[69328]: DEBUG nova.virt.hardware [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1215.368956] env[69328]: DEBUG nova.virt.hardware [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1215.369136] env[69328]: DEBUG nova.virt.hardware [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1215.369396] env[69328]: DEBUG nova.virt.hardware [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1215.369586] env[69328]: DEBUG nova.virt.hardware [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1215.369772] env[69328]: DEBUG nova.virt.hardware [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1215.369942] env[69328]: DEBUG nova.virt.hardware [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1215.370133] env[69328]: DEBUG nova.virt.hardware [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1215.371087] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc705c22-9b6d-44de-a154-506c152693a0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.381748] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1655ae95-41e6-4061-8675-dfccda297830 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.547842] env[69328]: DEBUG nova.compute.manager [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1215.548931] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a2a1ea2-e6c2-4b2b-89b1-4883b704a552 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.611991] env[69328]: DEBUG oslo_vmware.api [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274312, 'name': MoveVirtualDisk_Task} progress is 74%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.618021] env[69328]: DEBUG nova.compute.manager [req-e84d6c96-b6ca-44fa-8317-be656cdefd51 req-3bd59562-7999-4ea1-a7d7-06e24438b800 service nova] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Received event network-vif-plugged-c5384f33-ee5c-4e98-b375-1ff0f83ac920 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1215.618254] env[69328]: DEBUG oslo_concurrency.lockutils [req-e84d6c96-b6ca-44fa-8317-be656cdefd51 req-3bd59562-7999-4ea1-a7d7-06e24438b800 service nova] Acquiring lock "d1820cd5-bacb-4097-8d05-fffea8b64e2b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1215.618491] env[69328]: DEBUG oslo_concurrency.lockutils [req-e84d6c96-b6ca-44fa-8317-be656cdefd51 req-3bd59562-7999-4ea1-a7d7-06e24438b800 service nova] Lock "d1820cd5-bacb-4097-8d05-fffea8b64e2b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1215.618660] env[69328]: DEBUG oslo_concurrency.lockutils [req-e84d6c96-b6ca-44fa-8317-be656cdefd51 req-3bd59562-7999-4ea1-a7d7-06e24438b800 service nova] Lock "d1820cd5-bacb-4097-8d05-fffea8b64e2b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1215.618855] env[69328]: DEBUG nova.compute.manager [req-e84d6c96-b6ca-44fa-8317-be656cdefd51 req-3bd59562-7999-4ea1-a7d7-06e24438b800 service nova] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] No waiting events found dispatching network-vif-plugged-c5384f33-ee5c-4e98-b375-1ff0f83ac920 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1215.619406] env[69328]: WARNING nova.compute.manager [req-e84d6c96-b6ca-44fa-8317-be656cdefd51 req-3bd59562-7999-4ea1-a7d7-06e24438b800 service nova] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Received unexpected event network-vif-plugged-c5384f33-ee5c-4e98-b375-1ff0f83ac920 for instance with vm_state building and task_state spawning. 
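The WARNING above (network-vif-plugged received for an instance that is still building) reflects the ordering between Neutron's callback and the compute manager's event registration: no waiter has been registered for port c5384f33-ee5c-4e98-b375-1ff0f83ac920 yet, so the event is logged as unexpected and discarded instead of being dispatched. A minimal sketch of that register-then-dispatch pattern is shown below; the class and method names are illustrative stand-ins, not Nova's actual implementation.

```python
import logging
import threading

LOG = logging.getLogger(__name__)


class InstanceEvents:
    """Illustrative register/dispatch pattern for external instance events."""

    def __init__(self):
        self._waiters = {}   # (instance_uuid, event_name) -> threading.Event
        self._lock = threading.Lock()

    def prepare_for_event(self, instance_uuid, event_name):
        # The driver registers a waiter *before* triggering the action
        # (e.g. plugging the VIF) that will make Neutron send the event.
        waiter = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = waiter
        return waiter

    def dispatch(self, instance_uuid, event_name):
        # Called when the external (Neutron) notification arrives.
        with self._lock:
            waiter = self._waiters.pop((instance_uuid, event_name), None)
        if waiter is None:
            # Nothing registered yet: same situation as the
            # "Received unexpected event" warning in the log above.
            LOG.warning("Received unexpected event %s for instance %s",
                        event_name, instance_uuid)
            return False
        waiter.set()
        return True
```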
[ 1215.713625] env[69328]: DEBUG nova.network.neutron [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Successfully updated port: c5384f33-ee5c-4e98-b375-1ff0f83ac920 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1216.062312] env[69328]: INFO nova.compute.manager [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] instance snapshotting
[ 1216.062707] env[69328]: DEBUG nova.objects.instance [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lazy-loading 'flavor' on Instance uuid 0cf68559-5f07-4006-9f7f-59027e31635d {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1216.109593] env[69328]: DEBUG oslo_vmware.api [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274312, 'name': MoveVirtualDisk_Task} progress is 97%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1216.217163] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Acquiring lock "refresh_cache-d1820cd5-bacb-4097-8d05-fffea8b64e2b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1216.217358] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Acquired lock "refresh_cache-d1820cd5-bacb-4097-8d05-fffea8b64e2b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}}
[ 1216.217584] env[69328]: DEBUG nova.network.neutron [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 1216.570857] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd314c68-8905-42ae-8204-0d6d5064f906 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1216.589841] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec452205-d25e-4b1d-9533-d91975d66301 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1216.606312] env[69328]: DEBUG oslo_vmware.api [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274312, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.863688} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
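The repeated "Task: {…, 'name': MoveVirtualDisk_Task} progress is N%." entries come from a poll loop around the vCenter task object: the API session re-reads the task state on an interval until it reports success or error. A simplified version of that loop is sketched below, assuming a hypothetical get_task_info callable in place of the real SOAP TaskInfo plumbing.

```python
import time


class TaskFailed(Exception):
    """Raised when the remote task ends in an error state (placeholder type)."""


def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
    """Poll a vCenter-style task until it completes.

    get_task_info is a placeholder callable returning an object with
    state ('running', 'success', 'error'), progress (0-100) and error_msg
    attributes; the real TaskInfo structure differs, but the control flow
    is the same.
    """
    while True:
        info = get_task_info(task_ref)
        if info.state == 'success':
            return info                      # "... completed successfully."
        if info.state == 'error':
            raise TaskFailed(info.error_msg)
        # Still running: log progress, sleep, poll again. This is what
        # produces the "progress is 97%." style lines in the log.
        print(f"Task {task_ref}: progress is {info.progress}%.")
        time.sleep(poll_interval)
```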
[ 1216.606623] env[69328]: INFO nova.virt.vmwareapi.ds_util [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_21e4d26b-ca34-411f-bc67-6edb17ef6893/OSTACK_IMG_21e4d26b-ca34-411f-bc67-6edb17ef6893.vmdk to [datastore2] devstack-image-cache_base/025a677f-beea-4695-85ea-28c156879ab9/025a677f-beea-4695-85ea-28c156879ab9.vmdk.
[ 1216.606825] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Cleaning up location [datastore2] OSTACK_IMG_21e4d26b-ca34-411f-bc67-6edb17ef6893 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}}
[ 1216.606988] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_21e4d26b-ca34-411f-bc67-6edb17ef6893 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1216.607263] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-451e60a1-53d7-4417-a2fa-cf89aa88a854 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1216.614179] env[69328]: DEBUG oslo_vmware.api [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){
[ 1216.614179] env[69328]: value = "task-3274314"
[ 1216.614179] env[69328]: _type = "Task"
[ 1216.614179] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1216.620539] env[69328]: DEBUG oslo_vmware.api [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274314, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1216.758882] env[69328]: DEBUG nova.network.neutron [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Instance cache missing network info.
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1216.896972] env[69328]: DEBUG nova.network.neutron [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Updating instance_info_cache with network_info: [{"id": "c5384f33-ee5c-4e98-b375-1ff0f83ac920", "address": "fa:16:3e:37:a4:74", "network": {"id": "06e299c6-2b3c-4fee-a180-e9eedcadaa87", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1642011170-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b02169b335c44970bc9248506de3f2a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5384f33-ee", "ovs_interfaceid": "c5384f33-ee5c-4e98-b375-1ff0f83ac920", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1217.102933] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Creating Snapshot of the VM instance {{(pid=69328) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1217.103385] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-6f0887e9-733e-4d54-96e6-01a6c6462310 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.113769] env[69328]: DEBUG oslo_vmware.api [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1217.113769] env[69328]: value = "task-3274315" [ 1217.113769] env[69328]: _type = "Task" [ 1217.113769] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.126276] env[69328]: DEBUG oslo_vmware.api [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274314, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.046606} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.129434] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1217.129573] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Releasing lock "[datastore2] devstack-image-cache_base/025a677f-beea-4695-85ea-28c156879ab9/025a677f-beea-4695-85ea-28c156879ab9.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1217.129778] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/025a677f-beea-4695-85ea-28c156879ab9/025a677f-beea-4695-85ea-28c156879ab9.vmdk to [datastore2] ae46c18e-15ae-4a47-b05a-a143f10b5ab6/ae46c18e-15ae-4a47-b05a-a143f10b5ab6.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1217.130070] env[69328]: DEBUG oslo_vmware.api [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274315, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.130293] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f28d9aac-d260-480f-a135-0fa02cb4dbb1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.136503] env[69328]: DEBUG oslo_vmware.api [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 1217.136503] env[69328]: value = "task-3274316" [ 1217.136503] env[69328]: _type = "Task" [ 1217.136503] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.144714] env[69328]: DEBUG oslo_vmware.api [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274316, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.162081] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Acquiring lock "28d608b8-c06a-4e71-b3e2-94c63619cec0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1217.162328] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Lock "28d608b8-c06a-4e71-b3e2-94c63619cec0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1217.399571] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Releasing lock "refresh_cache-d1820cd5-bacb-4097-8d05-fffea8b64e2b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1217.399940] env[69328]: DEBUG nova.compute.manager [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Instance network_info: |[{"id": "c5384f33-ee5c-4e98-b375-1ff0f83ac920", "address": "fa:16:3e:37:a4:74", "network": {"id": "06e299c6-2b3c-4fee-a180-e9eedcadaa87", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1642011170-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b02169b335c44970bc9248506de3f2a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5384f33-ee", "ovs_interfaceid": "c5384f33-ee5c-4e98-b375-1ff0f83ac920", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1217.400491] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:37:a4:74', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '62237242-7ce2-4664-a1c5-6783b516b507', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'c5384f33-ee5c-4e98-b375-1ff0f83ac920', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1217.409242] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Creating folder: Project (b02169b335c44970bc9248506de3f2a8). Parent ref: group-v653649. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1217.409630] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4951605f-85c0-43ed-86f5-d20be8653f71 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.422496] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Created folder: Project (b02169b335c44970bc9248506de3f2a8) in parent group-v653649. [ 1217.422735] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Creating folder: Instances. Parent ref: group-v653974. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1217.423036] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-178a67bc-e9b2-491c-b3a7-1a5c89cab08c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.434563] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Created folder: Instances in parent group-v653974. [ 1217.434851] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1217.435083] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1217.435735] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-790d35e9-583b-4465-a5c8-9162d18e8098 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.455891] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1217.455891] env[69328]: value = "task-3274319" [ 1217.455891] env[69328]: _type = "Task" [ 1217.455891] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.464369] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274319, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.626720] env[69328]: DEBUG oslo_vmware.api [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274315, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.646242] env[69328]: DEBUG oslo_vmware.api [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274316, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.651550] env[69328]: DEBUG nova.compute.manager [req-f49104a8-fe67-4f5f-bda9-6e3b740530d6 req-d0327a42-60bd-4e0e-83e3-b4dab1931da8 service nova] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Received event network-changed-c5384f33-ee5c-4e98-b375-1ff0f83ac920 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1217.651698] env[69328]: DEBUG nova.compute.manager [req-f49104a8-fe67-4f5f-bda9-6e3b740530d6 req-d0327a42-60bd-4e0e-83e3-b4dab1931da8 service nova] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Refreshing instance network info cache due to event network-changed-c5384f33-ee5c-4e98-b375-1ff0f83ac920. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1217.651917] env[69328]: DEBUG oslo_concurrency.lockutils [req-f49104a8-fe67-4f5f-bda9-6e3b740530d6 req-d0327a42-60bd-4e0e-83e3-b4dab1931da8 service nova] Acquiring lock "refresh_cache-d1820cd5-bacb-4097-8d05-fffea8b64e2b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1217.652088] env[69328]: DEBUG oslo_concurrency.lockutils [req-f49104a8-fe67-4f5f-bda9-6e3b740530d6 req-d0327a42-60bd-4e0e-83e3-b4dab1931da8 service nova] Acquired lock "refresh_cache-d1820cd5-bacb-4097-8d05-fffea8b64e2b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1217.652271] env[69328]: DEBUG nova.network.neutron [req-f49104a8-fe67-4f5f-bda9-6e3b740530d6 req-d0327a42-60bd-4e0e-83e3-b4dab1931da8 service nova] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Refreshing network info cache for port c5384f33-ee5c-4e98-b375-1ff0f83ac920 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1217.665858] env[69328]: DEBUG nova.compute.manager [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1217.971149] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274319, 'name': CreateVM_Task} progress is 25%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.127687] env[69328]: DEBUG oslo_vmware.api [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274315, 'name': CreateSnapshot_Task, 'duration_secs': 0.795528} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1218.128062] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Created Snapshot of the VM instance {{(pid=69328) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}}
[ 1218.128820] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60e5b290-273f-4d63-a64e-c2552280c8a2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1218.149768] env[69328]: DEBUG oslo_vmware.api [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274316, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1218.190970] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 1218.191309] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 1218.192855] env[69328]: INFO nova.compute.claims [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1218.377762] env[69328]: DEBUG nova.network.neutron [req-f49104a8-fe67-4f5f-bda9-6e3b740530d6 req-d0327a42-60bd-4e0e-83e3-b4dab1931da8 service nova] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Updated VIF entry in instance network info cache for port c5384f33-ee5c-4e98-b375-1ff0f83ac920. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
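The 'Acquiring lock "compute_resources" … Claim successful' sequence shows the resource tracker serializing claims for the host: the free-capacity check and the usage update happen under one named lock, so concurrent builds cannot oversubscribe the node. Below is a condensed sketch of that claim-under-lock pattern using oslo.concurrency's lockutils.lock context manager; the capacity bookkeeping is simplified and hypothetical, not Nova's actual tracker.

```python
from oslo_concurrency import lockutils


class ResourceTracker:
    """Simplified claim-under-lock pattern; not Nova's actual tracker."""

    def __init__(self, total_vcpus, allocation_ratio=4.0):
        # e.g. 48 physical vCPUs at a 4.0 allocation ratio, as reported
        # in the inventory data earlier in this log.
        self.capacity = int(total_vcpus * allocation_ratio)
        self.used = 0

    def instance_claim(self, flavor_vcpus):
        # The whole check-and-reserve step runs under a single named lock
        # so concurrent builds see a consistent view of usage.
        with lockutils.lock("compute_resources"):
            if self.used + flavor_vcpus > self.capacity:
                raise RuntimeError("Insufficient vCPU capacity for claim")
            self.used += flavor_vcpus
            return {"vcpus": flavor_vcpus}   # "Claim successful"
```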
[ 1218.377762] env[69328]: DEBUG nova.network.neutron [req-f49104a8-fe67-4f5f-bda9-6e3b740530d6 req-d0327a42-60bd-4e0e-83e3-b4dab1931da8 service nova] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Updating instance_info_cache with network_info: [{"id": "c5384f33-ee5c-4e98-b375-1ff0f83ac920", "address": "fa:16:3e:37:a4:74", "network": {"id": "06e299c6-2b3c-4fee-a180-e9eedcadaa87", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1642011170-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b02169b335c44970bc9248506de3f2a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5384f33-ee", "ovs_interfaceid": "c5384f33-ee5c-4e98-b375-1ff0f83ac920", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1218.467795] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274319, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1218.651785] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Creating linked-clone VM from snapshot {{(pid=69328) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}}
[ 1218.653137] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-5a5d872c-1230-42f3-b5e7-43a843b565e7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1218.660435] env[69328]: DEBUG oslo_vmware.api [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274316, 'name': CopyVirtualDisk_Task} progress is 63%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1218.670434] env[69328]: DEBUG oslo_vmware.api [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){
[ 1218.670434] env[69328]: value = "task-3274320"
[ 1218.670434] env[69328]: _type = "Task"
[ 1218.670434] env[69328]: } to complete.
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.682857] env[69328]: DEBUG oslo_vmware.api [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274320, 'name': CloneVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.881140] env[69328]: DEBUG oslo_concurrency.lockutils [req-f49104a8-fe67-4f5f-bda9-6e3b740530d6 req-d0327a42-60bd-4e0e-83e3-b4dab1931da8 service nova] Releasing lock "refresh_cache-d1820cd5-bacb-4097-8d05-fffea8b64e2b" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1218.975793] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274319, 'name': CreateVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.151708] env[69328]: DEBUG oslo_vmware.api [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274316, 'name': CopyVirtualDisk_Task} progress is 85%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.183505] env[69328]: DEBUG oslo_vmware.api [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274320, 'name': CloneVM_Task} progress is 94%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.301888] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83e52f21-4ad0-4bca-8c6f-4b42ab8f6579 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.312508] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df8c2e3e-c961-4d9c-9acc-64b52f713176 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.347339] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8866ec2d-3d28-4c16-9cb4-267496f3efdd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.358715] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1762fbf1-229a-4937-bc79-cf76e8e7e432 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.375314] env[69328]: DEBUG nova.compute.provider_tree [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1219.467505] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274319, 'name': CreateVM_Task, 'duration_secs': 1.875365} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.467853] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1219.468595] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1219.468814] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1219.469741] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1219.469741] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a24f9c79-e709-4942-9a4e-18875a0f1643 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.473956] env[69328]: DEBUG oslo_vmware.api [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Waiting for the task: (returnval){ [ 1219.473956] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5235b2be-7c7b-5054-f9c8-5460dc1b0512" [ 1219.473956] env[69328]: _type = "Task" [ 1219.473956] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.483345] env[69328]: DEBUG oslo_vmware.api [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5235b2be-7c7b-5054-f9c8-5460dc1b0512, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.651616] env[69328]: DEBUG oslo_vmware.api [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274316, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.345567} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.651947] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/025a677f-beea-4695-85ea-28c156879ab9/025a677f-beea-4695-85ea-28c156879ab9.vmdk to [datastore2] ae46c18e-15ae-4a47-b05a-a143f10b5ab6/ae46c18e-15ae-4a47-b05a-a143f10b5ab6.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1219.652871] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c501b43-2f97-4886-804d-3a073b2a6630 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.677011] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Reconfiguring VM instance instance-00000071 to attach disk [datastore2] ae46c18e-15ae-4a47-b05a-a143f10b5ab6/ae46c18e-15ae-4a47-b05a-a143f10b5ab6.vmdk or device None with type streamOptimized {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1219.677393] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-07e60daa-72b1-48a1-b2a2-62aa36afc6fd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.701220] env[69328]: DEBUG oslo_vmware.api [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274320, 'name': CloneVM_Task} progress is 94%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.702521] env[69328]: DEBUG oslo_vmware.api [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 1219.702521] env[69328]: value = "task-3274321" [ 1219.702521] env[69328]: _type = "Task" [ 1219.702521] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.710524] env[69328]: DEBUG oslo_vmware.api [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274321, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.880119] env[69328]: DEBUG nova.scheduler.client.report [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1219.992060] env[69328]: DEBUG oslo_vmware.api [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5235b2be-7c7b-5054-f9c8-5460dc1b0512, 'name': SearchDatastore_Task, 'duration_secs': 0.014397} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.992060] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1219.992060] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1219.992060] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1219.992060] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1219.992060] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1219.992060] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-f418fd6c-3c13-4c15-bbaf-54d09e41fab6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.004972] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1220.004972] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1220.004972] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01aaa68c-d418-4c5f-9e5a-a1179917376a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.010492] env[69328]: DEBUG oslo_vmware.api [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Waiting for the task: (returnval){ [ 1220.010492] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d8dad0-a284-73c1-f16c-7a7ce25b32a1" [ 1220.010492] env[69328]: _type = "Task" [ 1220.010492] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.019128] env[69328]: DEBUG oslo_vmware.api [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d8dad0-a284-73c1-f16c-7a7ce25b32a1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.188733] env[69328]: DEBUG oslo_vmware.api [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274320, 'name': CloneVM_Task, 'duration_secs': 1.437602} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1220.189273] env[69328]: INFO nova.virt.vmwareapi.vmops [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Created linked-clone VM from snapshot
[ 1220.190047] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c340301-83d8-496c-be38-e2d94f14eb8d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1220.197301] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Uploading image 367697f9-6e5d-4706-b316-2b0bb5022954 {{(pid=69328) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}}
[ 1220.212220] env[69328]: DEBUG oslo_vmware.api [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274321, 'name': ReconfigVM_Task, 'duration_secs': 0.353999} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1220.214268] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Reconfigured VM instance instance-00000071 to attach disk [datastore2] ae46c18e-15ae-4a47-b05a-a143f10b5ab6/ae46c18e-15ae-4a47-b05a-a143f10b5ab6.vmdk or device None with type streamOptimized {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 1220.215096] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f7b27502-eb06-4afd-b995-502890b08f57 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1220.221506] env[69328]: DEBUG oslo_vmware.rw_handles [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Creating HttpNfcLease lease for exporting VM: (result){
[ 1220.221506] env[69328]: value = "vm-653978"
[ 1220.221506] env[69328]: _type = "VirtualMachine"
[ 1220.221506] env[69328]: }. {{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}}
[ 1220.221730] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-474d13fa-f7a7-447e-ab6f-4a7a2f0b6f67 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1220.224617] env[69328]: DEBUG oslo_vmware.api [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){
[ 1220.224617] env[69328]: value = "task-3274322"
[ 1220.224617] env[69328]: _type = "Task"
[ 1220.224617] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
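The rw_handles entries that follow trace the export of the linked-clone snapshot: an HttpNfcLease is requested for the VM, the code waits for the lease to leave its initializing state, and the disk is then streamed from the VMDK URL published in the lease info while progress is reported so the lease does not time out. A rough client-side sketch of that flow, assuming hypothetical lease helper methods (is_ready, get_vmdk_url, report_progress, complete) and a lab setup with a self-signed vCenter certificate:

```python
import time
import requests


def export_vmdk(lease, dest_path, poll_interval=1.0, chunk_size=1024 * 1024):
    """Stream a VM disk through an NFC lease; helper names are illustrative."""
    # 1. Wait for the lease to become usable (compare the
    #    "} is initializing." and later "} is ready." lines in the log).
    while not lease.is_ready():
        time.sleep(poll_interval)

    # 2. The ready lease advertises device URLs; pick the VMDK one,
    #    e.g. https://<esx-host>/nfc/<token>/disk-0.vmdk as seen above.
    url = lease.get_vmdk_url()

    # 3. Stream the disk to local storage, reporting progress so the
    #    lease is kept alive for the duration of the transfer.
    with requests.get(url, stream=True, verify=False) as resp:
        resp.raise_for_status()
        done = 0
        with open(dest_path, "wb") as out:
            for chunk in resp.iter_content(chunk_size=chunk_size):
                out.write(chunk)
                done += len(chunk)
                lease.report_progress(done)

    # 4. Mark the export finished so vCenter can release the lease.
    lease.complete()
```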
[ 1220.229518] env[69328]: DEBUG oslo_vmware.rw_handles [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lease: (returnval){
[ 1220.229518] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f5d01d-9630-2a74-3846-c64421207c33"
[ 1220.229518] env[69328]: _type = "HttpNfcLease"
[ 1220.229518] env[69328]: } obtained for exporting VM: (result){
[ 1220.229518] env[69328]: value = "vm-653978"
[ 1220.229518] env[69328]: _type = "VirtualMachine"
[ 1220.229518] env[69328]: }. {{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}}
[ 1220.229790] env[69328]: DEBUG oslo_vmware.api [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the lease: (returnval){
[ 1220.229790] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f5d01d-9630-2a74-3846-c64421207c33"
[ 1220.229790] env[69328]: _type = "HttpNfcLease"
[ 1220.229790] env[69328]: } to be ready. {{(pid=69328) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}}
[ 1220.233353] env[69328]: DEBUG oslo_vmware.api [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274322, 'name': Rename_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1220.238866] env[69328]: DEBUG oslo_vmware.api [-] Lease: (returnval){
[ 1220.238866] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f5d01d-9630-2a74-3846-c64421207c33"
[ 1220.238866] env[69328]: _type = "HttpNfcLease"
[ 1220.238866] env[69328]: } is initializing. {{(pid=69328) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}}
[ 1220.385404] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.194s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 1220.385986] env[69328]: DEBUG nova.compute.manager [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}}
[ 1220.520510] env[69328]: DEBUG oslo_vmware.api [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d8dad0-a284-73c1-f16c-7a7ce25b32a1, 'name': SearchDatastore_Task, 'duration_secs': 0.016955} completed successfully.
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.521313] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-072c6141-2c07-4ecd-95dd-0bd41b99b398 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.526957] env[69328]: DEBUG oslo_vmware.api [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Waiting for the task: (returnval){ [ 1220.526957] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a073ff-bb50-4324-e0ec-103f9fe0f0bb" [ 1220.526957] env[69328]: _type = "Task" [ 1220.526957] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.534544] env[69328]: DEBUG oslo_vmware.api [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a073ff-bb50-4324-e0ec-103f9fe0f0bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.735347] env[69328]: DEBUG oslo_vmware.api [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274322, 'name': Rename_Task, 'duration_secs': 0.146154} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.738318] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1220.738565] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e51e639b-1b6e-4720-8f08-d098ff368f49 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.743974] env[69328]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1220.743974] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f5d01d-9630-2a74-3846-c64421207c33" [ 1220.743974] env[69328]: _type = "HttpNfcLease" [ 1220.743974] env[69328]: } is ready. {{(pid=69328) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1220.744254] env[69328]: DEBUG oslo_vmware.rw_handles [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1220.744254] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52f5d01d-9630-2a74-3846-c64421207c33" [ 1220.744254] env[69328]: _type = "HttpNfcLease" [ 1220.744254] env[69328]: }. 
{{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1220.745799] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10f8591a-bd33-47c2-ac25-2f8da401474e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.748079] env[69328]: DEBUG oslo_vmware.api [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 1220.748079] env[69328]: value = "task-3274324" [ 1220.748079] env[69328]: _type = "Task" [ 1220.748079] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.753988] env[69328]: DEBUG oslo_vmware.rw_handles [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52343352-848c-e35e-d657-78ba2956cfcf/disk-0.vmdk from lease info. {{(pid=69328) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1220.754173] env[69328]: DEBUG oslo_vmware.rw_handles [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52343352-848c-e35e-d657-78ba2956cfcf/disk-0.vmdk for reading. {{(pid=69328) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1220.813073] env[69328]: DEBUG oslo_vmware.api [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274324, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.849995] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a68948f2-da7a-435c-87f2-f220673da0c4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.891366] env[69328]: DEBUG nova.compute.utils [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1220.892738] env[69328]: DEBUG nova.compute.manager [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1220.892914] env[69328]: DEBUG nova.network.neutron [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1220.937211] env[69328]: DEBUG nova.policy [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '77aa3cf086d64d469e6dbb88598bfc46', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f8074418da0e4a25bf9e2a46f08cf284', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 1221.038958] env[69328]: DEBUG oslo_vmware.api [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a073ff-bb50-4324-e0ec-103f9fe0f0bb, 'name': SearchDatastore_Task, 'duration_secs': 0.010432} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.039248] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1221.039513] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] d1820cd5-bacb-4097-8d05-fffea8b64e2b/d1820cd5-bacb-4097-8d05-fffea8b64e2b.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1221.039768] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3ee1cc17-adff-4576-8b90-4f0b4b035387 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.046655] env[69328]: DEBUG oslo_vmware.api [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Waiting for the task: (returnval){ [ 1221.046655] env[69328]: value = "task-3274325" [ 1221.046655] env[69328]: _type = "Task" [ 1221.046655] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.054623] env[69328]: DEBUG oslo_vmware.api [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Task: {'id': task-3274325, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.260452] env[69328]: DEBUG oslo_vmware.api [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274324, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.305761] env[69328]: DEBUG nova.network.neutron [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Successfully created port: cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1221.396162] env[69328]: DEBUG nova.compute.manager [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Start building block device mappings for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1221.559489] env[69328]: DEBUG oslo_vmware.api [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Task: {'id': task-3274325, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.759843] env[69328]: DEBUG oslo_vmware.api [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274324, 'name': PowerOnVM_Task, 'duration_secs': 0.536534} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.760598] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1221.864817] env[69328]: DEBUG nova.compute.manager [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1221.865911] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cd89428-10e8-4b43-9dfa-7ecab69d040b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.057515] env[69328]: DEBUG oslo_vmware.api [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Task: {'id': task-3274325, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.687293} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.057835] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] d1820cd5-bacb-4097-8d05-fffea8b64e2b/d1820cd5-bacb-4097-8d05-fffea8b64e2b.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1222.058151] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1222.058470] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9c3b1d37-3755-4612-b167-912255b71ccf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.064595] env[69328]: DEBUG oslo_vmware.api [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Waiting for the task: (returnval){ [ 1222.064595] env[69328]: value = "task-3274326" [ 1222.064595] env[69328]: _type = "Task" [ 1222.064595] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.073403] env[69328]: DEBUG oslo_vmware.api [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Task: {'id': task-3274326, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.384098] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ade24c8f-a487-49f0-907a-32cbcd21b3bd tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lock "ae46c18e-15ae-4a47-b05a-a143f10b5ab6" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 22.524s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1222.406204] env[69328]: DEBUG nova.compute.manager [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1222.427886] env[69328]: DEBUG nova.virt.hardware [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1222.428220] env[69328]: DEBUG nova.virt.hardware [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1222.428492] env[69328]: DEBUG nova.virt.hardware [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1222.428748] env[69328]: DEBUG nova.virt.hardware [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1222.428949] env[69328]: DEBUG nova.virt.hardware [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1222.429145] env[69328]: DEBUG nova.virt.hardware [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 
tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1222.429446] env[69328]: DEBUG nova.virt.hardware [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1222.429613] env[69328]: DEBUG nova.virt.hardware [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1222.429784] env[69328]: DEBUG nova.virt.hardware [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1222.430022] env[69328]: DEBUG nova.virt.hardware [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1222.430261] env[69328]: DEBUG nova.virt.hardware [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1222.431170] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-636c9228-8301-497b-ab88-57417acc0ee6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.439231] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9dee0d1-71f0-4e23-af5f-45fe91cfd989 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.574280] env[69328]: DEBUG oslo_vmware.api [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Task: {'id': task-3274326, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075284} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.574656] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1222.575711] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-182c3031-3da8-4084-8115-1d55e58993e8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.598101] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Reconfiguring VM instance instance-0000007a to attach disk [datastore2] d1820cd5-bacb-4097-8d05-fffea8b64e2b/d1820cd5-bacb-4097-8d05-fffea8b64e2b.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1222.598529] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-43e5772c-5bc3-49cd-8ce7-3cbb9b93b579 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.618181] env[69328]: DEBUG oslo_vmware.api [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Waiting for the task: (returnval){ [ 1222.618181] env[69328]: value = "task-3274327" [ 1222.618181] env[69328]: _type = "Task" [ 1222.618181] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.630357] env[69328]: DEBUG oslo_vmware.api [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Task: {'id': task-3274327, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.706063] env[69328]: DEBUG nova.compute.manager [req-e317a490-064a-43b7-93e3-0cbc8d54c966 req-0ee190a9-31d4-4cd5-aa97-4bfa8817113c service nova] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Received event network-vif-plugged-cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1222.706494] env[69328]: DEBUG oslo_concurrency.lockutils [req-e317a490-064a-43b7-93e3-0cbc8d54c966 req-0ee190a9-31d4-4cd5-aa97-4bfa8817113c service nova] Acquiring lock "28d608b8-c06a-4e71-b3e2-94c63619cec0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1222.706810] env[69328]: DEBUG oslo_concurrency.lockutils [req-e317a490-064a-43b7-93e3-0cbc8d54c966 req-0ee190a9-31d4-4cd5-aa97-4bfa8817113c service nova] Lock "28d608b8-c06a-4e71-b3e2-94c63619cec0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1222.707051] env[69328]: DEBUG oslo_concurrency.lockutils [req-e317a490-064a-43b7-93e3-0cbc8d54c966 req-0ee190a9-31d4-4cd5-aa97-4bfa8817113c service nova] Lock "28d608b8-c06a-4e71-b3e2-94c63619cec0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1222.707278] env[69328]: DEBUG nova.compute.manager [req-e317a490-064a-43b7-93e3-0cbc8d54c966 req-0ee190a9-31d4-4cd5-aa97-4bfa8817113c service nova] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] No waiting events found dispatching network-vif-plugged-cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1222.707513] env[69328]: WARNING nova.compute.manager [req-e317a490-064a-43b7-93e3-0cbc8d54c966 req-0ee190a9-31d4-4cd5-aa97-4bfa8817113c service nova] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Received unexpected event network-vif-plugged-cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3 for instance with vm_state building and task_state spawning. [ 1222.800897] env[69328]: DEBUG nova.network.neutron [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Successfully updated port: cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1223.128396] env[69328]: DEBUG oslo_vmware.api [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Task: {'id': task-3274327, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.304780] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Acquiring lock "refresh_cache-28d608b8-c06a-4e71-b3e2-94c63619cec0" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1223.304948] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Acquired lock "refresh_cache-28d608b8-c06a-4e71-b3e2-94c63619cec0" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1223.305128] env[69328]: DEBUG nova.network.neutron [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1223.628662] env[69328]: DEBUG oslo_vmware.api [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Task: {'id': task-3274327, 'name': ReconfigVM_Task, 'duration_secs': 0.588907} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.629073] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Reconfigured VM instance instance-0000007a to attach disk [datastore2] d1820cd5-bacb-4097-8d05-fffea8b64e2b/d1820cd5-bacb-4097-8d05-fffea8b64e2b.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1223.629553] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5295d0d1-e153-4b3d-ac52-3792057177dd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.635976] env[69328]: DEBUG oslo_vmware.api [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Waiting for the task: (returnval){ [ 1223.635976] env[69328]: value = "task-3274328" [ 1223.635976] env[69328]: _type = "Task" [ 1223.635976] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.643505] env[69328]: DEBUG oslo_vmware.api [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Task: {'id': task-3274328, 'name': Rename_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.839179] env[69328]: DEBUG nova.network.neutron [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1223.960794] env[69328]: DEBUG nova.network.neutron [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Updating instance_info_cache with network_info: [{"id": "cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3", "address": "fa:16:3e:38:a2:6c", "network": {"id": "cc24495f-8f1b-494e-82dc-f9af77c44b57", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1972816415-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f8074418da0e4a25bf9e2a46f08cf284", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae70d41-6ebf-472a-8504-6530eb37ea41", "external-id": "nsx-vlan-transportzone-576", "segmentation_id": 576, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf8b5fda-a9", "ovs_interfaceid": "cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1224.146809] env[69328]: DEBUG oslo_vmware.api [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Task: {'id': task-3274328, 'name': Rename_Task, 'duration_secs': 0.183141} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.147062] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1224.147314] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-88b2a83e-1c24-46ec-946a-983371f36f5e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.154096] env[69328]: DEBUG oslo_vmware.api [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Waiting for the task: (returnval){ [ 1224.154096] env[69328]: value = "task-3274329" [ 1224.154096] env[69328]: _type = "Task" [ 1224.154096] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.165956] env[69328]: DEBUG oslo_vmware.api [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Task: {'id': task-3274329, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.464428] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Releasing lock "refresh_cache-28d608b8-c06a-4e71-b3e2-94c63619cec0" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1224.464859] env[69328]: DEBUG nova.compute.manager [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Instance network_info: |[{"id": "cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3", "address": "fa:16:3e:38:a2:6c", "network": {"id": "cc24495f-8f1b-494e-82dc-f9af77c44b57", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1972816415-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f8074418da0e4a25bf9e2a46f08cf284", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae70d41-6ebf-472a-8504-6530eb37ea41", "external-id": "nsx-vlan-transportzone-576", "segmentation_id": 576, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf8b5fda-a9", "ovs_interfaceid": "cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1224.465444] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:38:a2:6c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cae70d41-6ebf-472a-8504-6530eb37ea41', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1224.473221] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Creating folder: Project (f8074418da0e4a25bf9e2a46f08cf284). Parent ref: group-v653649. 
{{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1224.473519] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9465cec2-775e-437d-8596-5c8cabfa0103 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.484934] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Created folder: Project (f8074418da0e4a25bf9e2a46f08cf284) in parent group-v653649. [ 1224.485183] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Creating folder: Instances. Parent ref: group-v653979. {{(pid=69328) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1224.485469] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b230d480-d17a-4993-b63d-e6003b74c2ac {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.494669] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Created folder: Instances in parent group-v653979. [ 1224.495038] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1224.495182] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1224.495438] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6c13f057-ada6-4fb4-9781-c389aea32423 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.514258] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1224.514258] env[69328]: value = "task-3274332" [ 1224.514258] env[69328]: _type = "Task" [ 1224.514258] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.521682] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274332, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.664263] env[69328]: DEBUG oslo_vmware.api [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Task: {'id': task-3274329, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.735261] env[69328]: DEBUG nova.compute.manager [req-8afb9f49-32f8-4231-99c0-b6f6b2552f13 req-ad765850-b962-4f76-bdc1-41d69a498ba3 service nova] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Received event network-changed-cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1224.735609] env[69328]: DEBUG nova.compute.manager [req-8afb9f49-32f8-4231-99c0-b6f6b2552f13 req-ad765850-b962-4f76-bdc1-41d69a498ba3 service nova] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Refreshing instance network info cache due to event network-changed-cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1224.735880] env[69328]: DEBUG oslo_concurrency.lockutils [req-8afb9f49-32f8-4231-99c0-b6f6b2552f13 req-ad765850-b962-4f76-bdc1-41d69a498ba3 service nova] Acquiring lock "refresh_cache-28d608b8-c06a-4e71-b3e2-94c63619cec0" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1224.736069] env[69328]: DEBUG oslo_concurrency.lockutils [req-8afb9f49-32f8-4231-99c0-b6f6b2552f13 req-ad765850-b962-4f76-bdc1-41d69a498ba3 service nova] Acquired lock "refresh_cache-28d608b8-c06a-4e71-b3e2-94c63619cec0" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1224.736276] env[69328]: DEBUG nova.network.neutron [req-8afb9f49-32f8-4231-99c0-b6f6b2552f13 req-ad765850-b962-4f76-bdc1-41d69a498ba3 service nova] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Refreshing network info cache for port cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1225.024997] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274332, 'name': CreateVM_Task, 'duration_secs': 0.454112} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.025202] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1225.025895] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1225.026076] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1225.026417] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1225.026685] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27a788c9-7082-403e-b143-cec694033a98 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.031233] env[69328]: DEBUG oslo_vmware.api [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Waiting for the task: (returnval){ [ 1225.031233] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]526503eb-21c2-eeeb-11d1-a7e1f6ae6a43" [ 1225.031233] env[69328]: _type = "Task" [ 1225.031233] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.039125] env[69328]: DEBUG oslo_vmware.api [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]526503eb-21c2-eeeb-11d1-a7e1f6ae6a43, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.165503] env[69328]: DEBUG oslo_vmware.api [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Task: {'id': task-3274329, 'name': PowerOnVM_Task, 'duration_secs': 0.739328} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.165877] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1225.166141] env[69328]: INFO nova.compute.manager [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Took 9.83 seconds to spawn the instance on the hypervisor. [ 1225.166441] env[69328]: DEBUG nova.compute.manager [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1225.167407] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6e9808c-6345-4b6b-91e0-eb4096b45690 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.422454] env[69328]: DEBUG nova.network.neutron [req-8afb9f49-32f8-4231-99c0-b6f6b2552f13 req-ad765850-b962-4f76-bdc1-41d69a498ba3 service nova] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Updated VIF entry in instance network info cache for port cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1225.422874] env[69328]: DEBUG nova.network.neutron [req-8afb9f49-32f8-4231-99c0-b6f6b2552f13 req-ad765850-b962-4f76-bdc1-41d69a498ba3 service nova] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Updating instance_info_cache with network_info: [{"id": "cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3", "address": "fa:16:3e:38:a2:6c", "network": {"id": "cc24495f-8f1b-494e-82dc-f9af77c44b57", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1972816415-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f8074418da0e4a25bf9e2a46f08cf284", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae70d41-6ebf-472a-8504-6530eb37ea41", "external-id": "nsx-vlan-transportzone-576", "segmentation_id": 576, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf8b5fda-a9", "ovs_interfaceid": "cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1225.542337] env[69328]: DEBUG oslo_vmware.api [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Task: {'id': 
session[527fc292-6741-f48d-313f-2d0c02ad0f69]526503eb-21c2-eeeb-11d1-a7e1f6ae6a43, 'name': SearchDatastore_Task, 'duration_secs': 0.01438} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.542689] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1225.542932] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1225.543225] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1225.543375] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1225.543568] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1225.543887] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ec82c4a0-a846-4311-b243-40198c7f1cf8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.553329] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1225.553474] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1225.554256] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8636e7b5-3004-4fd6-aaa6-b01f2d8d51b2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.560152] env[69328]: DEBUG oslo_vmware.api [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Waiting for the task: (returnval){ [ 1225.560152] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5206e9a6-713f-f6d8-bf54-c7da73c43053" [ 1225.560152] env[69328]: _type = "Task" [ 1225.560152] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.569026] env[69328]: DEBUG oslo_vmware.api [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5206e9a6-713f-f6d8-bf54-c7da73c43053, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.689203] env[69328]: INFO nova.compute.manager [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Took 15.28 seconds to build instance. [ 1225.927746] env[69328]: DEBUG oslo_concurrency.lockutils [req-8afb9f49-32f8-4231-99c0-b6f6b2552f13 req-ad765850-b962-4f76-bdc1-41d69a498ba3 service nova] Releasing lock "refresh_cache-28d608b8-c06a-4e71-b3e2-94c63619cec0" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1226.071268] env[69328]: DEBUG oslo_vmware.api [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5206e9a6-713f-f6d8-bf54-c7da73c43053, 'name': SearchDatastore_Task, 'duration_secs': 0.015042} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.072230] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6c177ad-de16-449f-81aa-4bbc9d4ae486 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.078603] env[69328]: DEBUG oslo_vmware.api [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Waiting for the task: (returnval){ [ 1226.078603] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b250e2-9702-f5be-527d-2791e8f58cfd" [ 1226.078603] env[69328]: _type = "Task" [ 1226.078603] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.086294] env[69328]: DEBUG oslo_vmware.api [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b250e2-9702-f5be-527d-2791e8f58cfd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.092826] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d2b7ece0-cf6a-4374-b78b-ca7f523cc4d9 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Acquiring lock "d1820cd5-bacb-4097-8d05-fffea8b64e2b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1226.192955] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e475c2b6-fd35-46ee-8b71-dd8f05d9b559 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Lock "d1820cd5-bacb-4097-8d05-fffea8b64e2b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.791s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1226.193294] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d2b7ece0-cf6a-4374-b78b-ca7f523cc4d9 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Lock "d1820cd5-bacb-4097-8d05-fffea8b64e2b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.100s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1226.193598] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d2b7ece0-cf6a-4374-b78b-ca7f523cc4d9 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Acquiring lock "d1820cd5-bacb-4097-8d05-fffea8b64e2b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1226.193853] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d2b7ece0-cf6a-4374-b78b-ca7f523cc4d9 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Lock "d1820cd5-bacb-4097-8d05-fffea8b64e2b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1226.194066] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d2b7ece0-cf6a-4374-b78b-ca7f523cc4d9 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Lock "d1820cd5-bacb-4097-8d05-fffea8b64e2b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1226.196488] env[69328]: INFO nova.compute.manager [None req-d2b7ece0-cf6a-4374-b78b-ca7f523cc4d9 tempest-InstanceActionsV221TestJSON-678336565 
tempest-InstanceActionsV221TestJSON-678336565-project-member] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Terminating instance [ 1226.590889] env[69328]: DEBUG oslo_vmware.api [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52b250e2-9702-f5be-527d-2791e8f58cfd, 'name': SearchDatastore_Task, 'duration_secs': 0.014414} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.591092] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1226.591292] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 28d608b8-c06a-4e71-b3e2-94c63619cec0/28d608b8-c06a-4e71-b3e2-94c63619cec0.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1226.591613] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8a5a2798-6cd0-472c-9748-148a1ebd4da7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.598931] env[69328]: DEBUG oslo_vmware.api [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Waiting for the task: (returnval){ [ 1226.598931] env[69328]: value = "task-3274333" [ 1226.598931] env[69328]: _type = "Task" [ 1226.598931] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.606766] env[69328]: DEBUG oslo_vmware.api [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Task: {'id': task-3274333, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.700458] env[69328]: DEBUG nova.compute.manager [None req-d2b7ece0-cf6a-4374-b78b-ca7f523cc4d9 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1226.700856] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b7ece0-cf6a-4374-b78b-ca7f523cc4d9 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1226.701642] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2793552d-aaea-4321-bb99-04ec1256ec27 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.709616] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2b7ece0-cf6a-4374-b78b-ca7f523cc4d9 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1226.709865] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-51998856-caad-46f9-baee-bb96eff027c4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.716595] env[69328]: DEBUG oslo_vmware.api [None req-d2b7ece0-cf6a-4374-b78b-ca7f523cc4d9 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Waiting for the task: (returnval){ [ 1226.716595] env[69328]: value = "task-3274334" [ 1226.716595] env[69328]: _type = "Task" [ 1226.716595] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.726382] env[69328]: DEBUG oslo_vmware.api [None req-d2b7ece0-cf6a-4374-b78b-ca7f523cc4d9 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Task: {'id': task-3274334, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.110343] env[69328]: DEBUG oslo_vmware.api [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Task: {'id': task-3274333, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.227534] env[69328]: DEBUG oslo_vmware.api [None req-d2b7ece0-cf6a-4374-b78b-ca7f523cc4d9 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Task: {'id': task-3274334, 'name': PowerOffVM_Task, 'duration_secs': 0.269654} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.227834] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2b7ece0-cf6a-4374-b78b-ca7f523cc4d9 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1227.227969] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b7ece0-cf6a-4374-b78b-ca7f523cc4d9 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1227.228251] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bfbfa3dc-bc4c-4efb-b801-50aa7e6f4954 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.313034] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b7ece0-cf6a-4374-b78b-ca7f523cc4d9 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1227.313307] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b7ece0-cf6a-4374-b78b-ca7f523cc4d9 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1227.313490] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2b7ece0-cf6a-4374-b78b-ca7f523cc4d9 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Deleting the datastore file [datastore2] d1820cd5-bacb-4097-8d05-fffea8b64e2b {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1227.313760] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3f8b13fc-8cde-4808-bb86-1e4ada38a239 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.321596] env[69328]: DEBUG oslo_vmware.api [None req-d2b7ece0-cf6a-4374-b78b-ca7f523cc4d9 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Waiting for the task: (returnval){ [ 1227.321596] env[69328]: value = "task-3274336" [ 1227.321596] env[69328]: _type = "Task" [ 1227.321596] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.330307] env[69328]: DEBUG oslo_vmware.api [None req-d2b7ece0-cf6a-4374-b78b-ca7f523cc4d9 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Task: {'id': task-3274336, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.609921] env[69328]: DEBUG oslo_vmware.api [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Task: {'id': task-3274333, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.658455} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.610191] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 28d608b8-c06a-4e71-b3e2-94c63619cec0/28d608b8-c06a-4e71-b3e2-94c63619cec0.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1227.610413] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1227.610684] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-21ecca9a-ec67-4199-8e51-09f8ff80049c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.618417] env[69328]: DEBUG oslo_vmware.api [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Waiting for the task: (returnval){ [ 1227.618417] env[69328]: value = "task-3274337" [ 1227.618417] env[69328]: _type = "Task" [ 1227.618417] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.626397] env[69328]: DEBUG oslo_vmware.api [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Task: {'id': task-3274337, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.831679] env[69328]: DEBUG oslo_vmware.api [None req-d2b7ece0-cf6a-4374-b78b-ca7f523cc4d9 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Task: {'id': task-3274336, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.128724] env[69328]: DEBUG oslo_vmware.api [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Task: {'id': task-3274337, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.205402} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.128985] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1228.129778] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b800f33e-96e0-4804-b30a-dd0ef9c5f720 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.151900] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] 28d608b8-c06a-4e71-b3e2-94c63619cec0/28d608b8-c06a-4e71-b3e2-94c63619cec0.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1228.152243] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-36457065-5cf1-43ed-83d8-3627bebad0eb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.171358] env[69328]: DEBUG oslo_vmware.api [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Waiting for the task: (returnval){ [ 1228.171358] env[69328]: value = "task-3274338" [ 1228.171358] env[69328]: _type = "Task" [ 1228.171358] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.180126] env[69328]: DEBUG oslo_vmware.api [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Task: {'id': task-3274338, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.332472] env[69328]: DEBUG oslo_vmware.api [None req-d2b7ece0-cf6a-4374-b78b-ca7f523cc4d9 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Task: {'id': task-3274336, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.843105} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.332758] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2b7ece0-cf6a-4374-b78b-ca7f523cc4d9 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1228.332857] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b7ece0-cf6a-4374-b78b-ca7f523cc4d9 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1228.333047] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b7ece0-cf6a-4374-b78b-ca7f523cc4d9 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1228.333225] env[69328]: INFO nova.compute.manager [None req-d2b7ece0-cf6a-4374-b78b-ca7f523cc4d9 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Took 1.63 seconds to destroy the instance on the hypervisor. [ 1228.333496] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d2b7ece0-cf6a-4374-b78b-ca7f523cc4d9 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1228.333693] env[69328]: DEBUG nova.compute.manager [-] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1228.333786] env[69328]: DEBUG nova.network.neutron [-] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1228.637476] env[69328]: DEBUG nova.compute.manager [req-f0d8e825-45ee-4621-ac79-b97349cde763 req-c93589af-05bd-4c18-87d5-7950c141d80b service nova] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Received event network-vif-deleted-c5384f33-ee5c-4e98-b375-1ff0f83ac920 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1228.637476] env[69328]: INFO nova.compute.manager [req-f0d8e825-45ee-4621-ac79-b97349cde763 req-c93589af-05bd-4c18-87d5-7950c141d80b service nova] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Neutron deleted interface c5384f33-ee5c-4e98-b375-1ff0f83ac920; detaching it from the instance and deleting it from the info cache [ 1228.637476] env[69328]: DEBUG nova.network.neutron [req-f0d8e825-45ee-4621-ac79-b97349cde763 req-c93589af-05bd-4c18-87d5-7950c141d80b service nova] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1228.682232] env[69328]: DEBUG oslo_vmware.api [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Task: {'id': task-3274338, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.118036] env[69328]: DEBUG nova.network.neutron [-] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1229.139030] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a4ab7184-1738-4082-aa2e-baa86432cbb0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.149204] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88d41b48-d45f-4d42-93f2-3864061f4a66 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.180539] env[69328]: DEBUG nova.compute.manager [req-f0d8e825-45ee-4621-ac79-b97349cde763 req-c93589af-05bd-4c18-87d5-7950c141d80b service nova] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Detach interface failed, port_id=c5384f33-ee5c-4e98-b375-1ff0f83ac920, reason: Instance d1820cd5-bacb-4097-8d05-fffea8b64e2b could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1229.189304] env[69328]: DEBUG oslo_vmware.api [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Task: {'id': task-3274338, 'name': ReconfigVM_Task, 'duration_secs': 0.714592} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.189703] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Reconfigured VM instance instance-0000007b to attach disk [datastore1] 28d608b8-c06a-4e71-b3e2-94c63619cec0/28d608b8-c06a-4e71-b3e2-94c63619cec0.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1229.190328] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7ba8a516-31ab-4aa5-9f78-6bd5a002bec7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.197686] env[69328]: DEBUG oslo_vmware.api [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Waiting for the task: (returnval){ [ 1229.197686] env[69328]: value = "task-3274339" [ 1229.197686] env[69328]: _type = "Task" [ 1229.197686] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.206235] env[69328]: DEBUG oslo_vmware.api [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Task: {'id': task-3274339, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.621059] env[69328]: INFO nova.compute.manager [-] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Took 1.29 seconds to deallocate network for instance. [ 1229.684459] env[69328]: DEBUG oslo_vmware.rw_handles [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52343352-848c-e35e-d657-78ba2956cfcf/disk-0.vmdk. {{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1229.685552] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59a3b1ee-e941-4178-84cc-16bf4f38d641 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.692489] env[69328]: DEBUG oslo_vmware.rw_handles [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52343352-848c-e35e-d657-78ba2956cfcf/disk-0.vmdk is in state: ready. {{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1229.692654] env[69328]: ERROR oslo_vmware.rw_handles [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52343352-848c-e35e-d657-78ba2956cfcf/disk-0.vmdk due to incomplete transfer. 
[ 1229.692865] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-54aca73b-1ffe-4c3f-8d47-55affd92cfcb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.702212] env[69328]: DEBUG oslo_vmware.rw_handles [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52343352-848c-e35e-d657-78ba2956cfcf/disk-0.vmdk. {{(pid=69328) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1229.702408] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Uploaded image 367697f9-6e5d-4706-b316-2b0bb5022954 to the Glance image server {{(pid=69328) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1229.704655] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Destroying the VM {{(pid=69328) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1229.705171] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-8b1824c0-85b0-4a77-b673-d15106c97f6a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.709412] env[69328]: DEBUG oslo_vmware.api [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Task: {'id': task-3274339, 'name': Rename_Task, 'duration_secs': 0.163129} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.709925] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1229.710260] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-23bcb88a-f858-4a5a-a1a9-56d12e517bc6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.713354] env[69328]: DEBUG oslo_vmware.api [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1229.713354] env[69328]: value = "task-3274340" [ 1229.713354] env[69328]: _type = "Task" [ 1229.713354] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.717172] env[69328]: DEBUG oslo_vmware.api [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Waiting for the task: (returnval){ [ 1229.717172] env[69328]: value = "task-3274341" [ 1229.717172] env[69328]: _type = "Task" [ 1229.717172] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.722587] env[69328]: DEBUG oslo_vmware.api [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274340, 'name': Destroy_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.727239] env[69328]: DEBUG oslo_vmware.api [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Task: {'id': task-3274341, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.127328] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d2b7ece0-cf6a-4374-b78b-ca7f523cc4d9 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1230.127666] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d2b7ece0-cf6a-4374-b78b-ca7f523cc4d9 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1230.127933] env[69328]: DEBUG nova.objects.instance [None req-d2b7ece0-cf6a-4374-b78b-ca7f523cc4d9 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Lazy-loading 'resources' on Instance uuid d1820cd5-bacb-4097-8d05-fffea8b64e2b {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1230.226093] env[69328]: DEBUG oslo_vmware.api [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274340, 'name': Destroy_Task, 'duration_secs': 0.468455} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.229192] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Destroyed the VM [ 1230.229443] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Deleting Snapshot of the VM instance {{(pid=69328) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1230.229712] env[69328]: DEBUG oslo_vmware.api [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Task: {'id': task-3274341, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.229929] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-1ccdd1ab-71ca-479c-9cea-4067c7611edb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.237254] env[69328]: DEBUG oslo_vmware.api [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1230.237254] env[69328]: value = "task-3274342" [ 1230.237254] env[69328]: _type = "Task" [ 1230.237254] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.244820] env[69328]: DEBUG oslo_vmware.api [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274342, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.712100] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c36303c7-660f-485e-afa8-1a7e7b017223 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.722434] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-081cf0e0-d44d-4bf2-a4a5-f70f986d9845 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.730085] env[69328]: DEBUG oslo_vmware.api [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Task: {'id': task-3274341, 'name': PowerOnVM_Task, 'duration_secs': 0.573704} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.754622] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1230.754834] env[69328]: INFO nova.compute.manager [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Took 8.35 seconds to spawn the instance on the hypervisor. [ 1230.755034] env[69328]: DEBUG nova.compute.manager [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1230.755999] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c76a9e23-c49a-4347-8574-12c568193315 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.761158] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8733d68b-108c-4b87-b912-5d42a0019948 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.772803] env[69328]: DEBUG oslo_vmware.api [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274342, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.774261] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31f02b38-18d4-44a2-9ee4-3364cba33822 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.787099] env[69328]: DEBUG nova.compute.provider_tree [None req-d2b7ece0-cf6a-4374-b78b-ca7f523cc4d9 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1231.265579] env[69328]: DEBUG oslo_vmware.api [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274342, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.281919] env[69328]: INFO nova.compute.manager [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Took 13.11 seconds to build instance. 
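[editor's note] The records above repeat one pattern for every long-running vCenter operation in this trace (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task): invoke the *_Task SOAP method through the session, then poll it until it completes. Below is a minimal sketch of that invoke-then-poll pattern using oslo.vmware directly, not the Nova driver code itself; the host and credentials are placeholders, and only the instance UUID is taken from the trace for illustration.

    # Sketch only: the pattern behind the "Waiting for the task" and
    # "progress is N%" records above. Host and credentials are placeholders.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vc1.example.test', 'devstack', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Locate the VM by instance UUID, as SearchIndex.FindAllByUuid does above.
    search_index = session.vim.service_content.searchIndex
    vm_refs = session.invoke_api(
        session.vim, 'FindAllByUuid', search_index,
        uuid='28d608b8-c06a-4e71-b3e2-94c63619cec0',
        vmSearch=True, instanceUuid=True)

    if vm_refs:
        # Start the long-running vSphere task, then block until it finishes;
        # wait_for_task() drives the periodic progress polls seen in the log.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_refs[0])
        task_info = session.wait_for_task(task)
        print(task_info.state)  # 'success' when the task completes

If the task ends in an error state, wait_for_task() raises instead of returning, so callers only ever see completed task info.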
[ 1231.289637] env[69328]: DEBUG nova.scheduler.client.report [None req-d2b7ece0-cf6a-4374-b78b-ca7f523cc4d9 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1231.650307] env[69328]: INFO nova.compute.manager [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Rescuing [ 1231.650650] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Acquiring lock "refresh_cache-28d608b8-c06a-4e71-b3e2-94c63619cec0" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1231.650861] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Acquired lock "refresh_cache-28d608b8-c06a-4e71-b3e2-94c63619cec0" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1231.651053] env[69328]: DEBUG nova.network.neutron [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1231.768304] env[69328]: DEBUG oslo_vmware.api [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274342, 'name': RemoveSnapshot_Task, 'duration_secs': 1.365134} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.768608] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Deleted Snapshot of the VM instance {{(pid=69328) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1231.768877] env[69328]: INFO nova.compute.manager [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Took 15.20 seconds to snapshot the instance on the hypervisor. 
[ 1231.783985] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a46925df-d23c-4dd9-a77b-3e186fd7cb90 tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Lock "28d608b8-c06a-4e71-b3e2-94c63619cec0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.622s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1231.794165] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d2b7ece0-cf6a-4374-b78b-ca7f523cc4d9 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.666s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1231.813708] env[69328]: INFO nova.scheduler.client.report [None req-d2b7ece0-cf6a-4374-b78b-ca7f523cc4d9 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Deleted allocations for instance d1820cd5-bacb-4097-8d05-fffea8b64e2b [ 1232.308678] env[69328]: DEBUG nova.compute.manager [None req-464b5a4b-dce8-43fc-89dd-c807a3673b42 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Found 1 images (rotation: 2) {{(pid=69328) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1232.321673] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d2b7ece0-cf6a-4374-b78b-ca7f523cc4d9 tempest-InstanceActionsV221TestJSON-678336565 tempest-InstanceActionsV221TestJSON-678336565-project-member] Lock "d1820cd5-bacb-4097-8d05-fffea8b64e2b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.128s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1232.334875] env[69328]: DEBUG nova.network.neutron [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Updating instance_info_cache with network_info: [{"id": "cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3", "address": "fa:16:3e:38:a2:6c", "network": {"id": "cc24495f-8f1b-494e-82dc-f9af77c44b57", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1972816415-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f8074418da0e4a25bf9e2a46f08cf284", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae70d41-6ebf-472a-8504-6530eb37ea41", "external-id": "nsx-vlan-transportzone-576", "segmentation_id": 576, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf8b5fda-a9", "ovs_interfaceid": "cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1232.837027] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Releasing lock "refresh_cache-28d608b8-c06a-4e71-b3e2-94c63619cec0" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1233.545261] env[69328]: DEBUG nova.compute.manager [None req-3676db1a-46bf-4b77-ba16-541e87c8f7d0 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1233.546321] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d28d3ea-63c8-4c7b-b60b-005101f148e1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.058887] env[69328]: INFO nova.compute.manager [None req-3676db1a-46bf-4b77-ba16-541e87c8f7d0 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] instance snapshotting [ 1234.058887] env[69328]: DEBUG nova.objects.instance [None req-3676db1a-46bf-4b77-ba16-541e87c8f7d0 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lazy-loading 'flavor' on Instance uuid 0cf68559-5f07-4006-9f7f-59027e31635d {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1234.370350] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1234.370636] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ebe5cc77-44cf-441a-b4ce-019c3e3e9ff5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.378405] env[69328]: DEBUG oslo_vmware.api [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Waiting for the task: (returnval){ [ 1234.378405] env[69328]: value = "task-3274343" [ 1234.378405] env[69328]: _type = "Task" [ 1234.378405] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.386228] env[69328]: DEBUG oslo_vmware.api [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Task: {'id': task-3274343, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.565626] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75830a6b-dc8e-42a3-988c-bf7f205fcd58 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.589300] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5613c2ac-95e9-4770-8c5b-01f37be6cb5c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.746763] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a141a5a7-2f9b-4324-ba61-9122e3470f32 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Acquiring lock "d19f6a2a-3a16-4031-8c20-143ccfd6f5f5" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1234.746971] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a141a5a7-2f9b-4324-ba61-9122e3470f32 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lock "d19f6a2a-3a16-4031-8c20-143ccfd6f5f5" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1234.889032] env[69328]: DEBUG oslo_vmware.api [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Task: {'id': task-3274343, 'name': PowerOffVM_Task, 'duration_secs': 0.181091} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.889032] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1234.889403] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7012925c-8d0b-4ea3-bde0-de2cfac56408 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.906905] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35a80b7e-b7d1-4643-a88d-67d77cb77c88 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.940219] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1234.940544] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a1f532bc-1c21-4a23-8aab-2af29aabe5e0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.947058] env[69328]: DEBUG oslo_vmware.api [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Waiting for the task: (returnval){ [ 1234.947058] env[69328]: value = "task-3274344" [ 1234.947058] env[69328]: _type = "Task" [ 1234.947058] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.954802] env[69328]: DEBUG oslo_vmware.api [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Task: {'id': task-3274344, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.101704] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3676db1a-46bf-4b77-ba16-541e87c8f7d0 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Creating Snapshot of the VM instance {{(pid=69328) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1235.102092] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c0479775-f706-49ca-957a-10e138ee0b8f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.109803] env[69328]: DEBUG oslo_vmware.api [None req-3676db1a-46bf-4b77-ba16-541e87c8f7d0 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1235.109803] env[69328]: value = "task-3274345" [ 1235.109803] env[69328]: _type = "Task" [ 1235.109803] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.117682] env[69328]: DEBUG oslo_vmware.api [None req-3676db1a-46bf-4b77-ba16-541e87c8f7d0 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274345, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.250986] env[69328]: DEBUG nova.compute.utils [None req-a141a5a7-2f9b-4324-ba61-9122e3470f32 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1235.457714] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] VM already powered off {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1235.457925] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1235.458185] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1235.458333] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1235.458506] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1235.458746] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ec285551-ab6b-4349-afa0-5598bb6e1639 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.467557] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1235.467719] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None 
req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1235.468391] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f454555-d9ac-4f85-9dc8-a3708ebccdf9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.473393] env[69328]: DEBUG oslo_vmware.api [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Waiting for the task: (returnval){ [ 1235.473393] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ac4441-ee8d-ee13-a8e9-039deb7ab656" [ 1235.473393] env[69328]: _type = "Task" [ 1235.473393] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.480568] env[69328]: DEBUG oslo_vmware.api [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ac4441-ee8d-ee13-a8e9-039deb7ab656, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.619939] env[69328]: DEBUG oslo_vmware.api [None req-3676db1a-46bf-4b77-ba16-541e87c8f7d0 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274345, 'name': CreateSnapshot_Task, 'duration_secs': 0.457231} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.620350] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3676db1a-46bf-4b77-ba16-541e87c8f7d0 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Created Snapshot of the VM instance {{(pid=69328) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1235.620943] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b84a2dd7-eb26-46b9-9a97-a71b9f57e482 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.754301] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a141a5a7-2f9b-4324-ba61-9122e3470f32 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lock "d19f6a2a-3a16-4031-8c20-143ccfd6f5f5" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1235.984305] env[69328]: DEBUG oslo_vmware.api [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52ac4441-ee8d-ee13-a8e9-039deb7ab656, 'name': SearchDatastore_Task, 'duration_secs': 0.009103} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.984993] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3935bb6b-40b5-4845-a823-233c0cb692ef {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.989706] env[69328]: DEBUG oslo_vmware.api [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Waiting for the task: (returnval){ [ 1235.989706] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a61a19-65e3-db4a-a84e-56860c05289d" [ 1235.989706] env[69328]: _type = "Task" [ 1235.989706] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.996689] env[69328]: DEBUG oslo_vmware.api [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a61a19-65e3-db4a-a84e-56860c05289d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.139191] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3676db1a-46bf-4b77-ba16-541e87c8f7d0 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Creating linked-clone VM from snapshot {{(pid=69328) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1236.139504] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-38da1a38-851d-4b23-b5b9-36f9715d57ba {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.148682] env[69328]: DEBUG oslo_vmware.api [None req-3676db1a-46bf-4b77-ba16-541e87c8f7d0 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1236.148682] env[69328]: value = "task-3274346" [ 1236.148682] env[69328]: _type = "Task" [ 1236.148682] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.156619] env[69328]: DEBUG oslo_vmware.api [None req-3676db1a-46bf-4b77-ba16-541e87c8f7d0 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274346, 'name': CloneVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.500571] env[69328]: DEBUG oslo_vmware.api [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a61a19-65e3-db4a-a84e-56860c05289d, 'name': SearchDatastore_Task, 'duration_secs': 0.009783} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.500853] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1236.501103] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 28d608b8-c06a-4e71-b3e2-94c63619cec0/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318-rescue.vmdk. {{(pid=69328) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1236.501378] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b4f38ea5-2ddb-41c1-ae0c-d7bb1d6264be {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.508066] env[69328]: DEBUG oslo_vmware.api [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Waiting for the task: (returnval){ [ 1236.508066] env[69328]: value = "task-3274347" [ 1236.508066] env[69328]: _type = "Task" [ 1236.508066] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.515634] env[69328]: DEBUG oslo_vmware.api [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Task: {'id': task-3274347, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.658510] env[69328]: DEBUG oslo_vmware.api [None req-3676db1a-46bf-4b77-ba16-541e87c8f7d0 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274346, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.818997] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a141a5a7-2f9b-4324-ba61-9122e3470f32 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Acquiring lock "d19f6a2a-3a16-4031-8c20-143ccfd6f5f5" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1236.819306] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a141a5a7-2f9b-4324-ba61-9122e3470f32 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lock "d19f6a2a-3a16-4031-8c20-143ccfd6f5f5" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1236.819603] env[69328]: INFO nova.compute.manager [None req-a141a5a7-2f9b-4324-ba61-9122e3470f32 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Attaching volume 01dd0cf8-98ef-4f40-b76d-352f74c03810 to /dev/sdb [ 1236.857257] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-945d8063-9add-47ad-acfe-cc5298077537 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.864622] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f222919d-18dd-4061-8c54-4113945fa37d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.878033] env[69328]: DEBUG nova.virt.block_device [None req-a141a5a7-2f9b-4324-ba61-9122e3470f32 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Updating existing volume attachment record: f707f7aa-455f-415c-bae3-ff35efead3fd {{(pid=69328) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1237.017764] env[69328]: DEBUG oslo_vmware.api [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Task: {'id': task-3274347, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.453714} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.018042] env[69328]: INFO nova.virt.vmwareapi.ds_util [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 28d608b8-c06a-4e71-b3e2-94c63619cec0/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318-rescue.vmdk. 
[ 1237.018830] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab807227-5966-4f6c-bfb7-0e3783cba1bc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.042831] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] 28d608b8-c06a-4e71-b3e2-94c63619cec0/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318-rescue.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1237.043106] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d8085f50-15ea-4922-b096-3e16cfa38ff2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.059267] env[69328]: DEBUG oslo_vmware.api [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Waiting for the task: (returnval){ [ 1237.059267] env[69328]: value = "task-3274350" [ 1237.059267] env[69328]: _type = "Task" [ 1237.059267] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.068417] env[69328]: DEBUG oslo_vmware.api [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Task: {'id': task-3274350, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.159410] env[69328]: DEBUG oslo_vmware.api [None req-3676db1a-46bf-4b77-ba16-541e87c8f7d0 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274346, 'name': CloneVM_Task} progress is 95%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.568933] env[69328]: DEBUG oslo_vmware.api [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Task: {'id': task-3274350, 'name': ReconfigVM_Task, 'duration_secs': 0.300001} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.569247] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Reconfigured VM instance instance-0000007b to attach disk [datastore1] 28d608b8-c06a-4e71-b3e2-94c63619cec0/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318-rescue.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1237.570079] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae127625-7d28-4eec-b813-fd2f53b941f7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.594433] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d26dd425-3130-4bb8-b437-0c981e880372 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.608050] env[69328]: DEBUG oslo_vmware.api [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Waiting for the task: (returnval){ [ 1237.608050] env[69328]: value = "task-3274352" [ 1237.608050] env[69328]: _type = "Task" [ 1237.608050] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.615265] env[69328]: DEBUG oslo_vmware.api [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Task: {'id': task-3274352, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.658464] env[69328]: DEBUG oslo_vmware.api [None req-3676db1a-46bf-4b77-ba16-541e87c8f7d0 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274346, 'name': CloneVM_Task, 'duration_secs': 1.326981} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.658694] env[69328]: INFO nova.virt.vmwareapi.vmops [None req-3676db1a-46bf-4b77-ba16-541e87c8f7d0 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Created linked-clone VM from snapshot [ 1237.659424] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a68e8351-85f4-47f3-9909-0bd00dd1b972 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.666334] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-3676db1a-46bf-4b77-ba16-541e87c8f7d0 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Uploading image 4b4a918b-2d0f-48d1-9439-8f2a9216b87d {{(pid=69328) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1237.686899] env[69328]: DEBUG oslo_vmware.rw_handles [None req-3676db1a-46bf-4b77-ba16-541e87c8f7d0 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1237.686899] env[69328]: value = "vm-653983" [ 1237.686899] env[69328]: _type = "VirtualMachine" [ 1237.686899] env[69328]: }. {{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1237.687180] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-daf278b5-0285-44f6-80f6-482b159e28c1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.693915] env[69328]: DEBUG oslo_vmware.rw_handles [None req-3676db1a-46bf-4b77-ba16-541e87c8f7d0 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lease: (returnval){ [ 1237.693915] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e13458-6786-a96e-9585-812d1f584332" [ 1237.693915] env[69328]: _type = "HttpNfcLease" [ 1237.693915] env[69328]: } obtained for exporting VM: (result){ [ 1237.693915] env[69328]: value = "vm-653983" [ 1237.693915] env[69328]: _type = "VirtualMachine" [ 1237.693915] env[69328]: }. {{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1237.694290] env[69328]: DEBUG oslo_vmware.api [None req-3676db1a-46bf-4b77-ba16-541e87c8f7d0 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the lease: (returnval){ [ 1237.694290] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e13458-6786-a96e-9585-812d1f584332" [ 1237.694290] env[69328]: _type = "HttpNfcLease" [ 1237.694290] env[69328]: } to be ready. {{(pid=69328) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1237.700231] env[69328]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1237.700231] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e13458-6786-a96e-9585-812d1f584332" [ 1237.700231] env[69328]: _type = "HttpNfcLease" [ 1237.700231] env[69328]: } is initializing. 
{{(pid=69328) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1238.117636] env[69328]: DEBUG oslo_vmware.api [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Task: {'id': task-3274352, 'name': ReconfigVM_Task, 'duration_secs': 0.16499} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.117874] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1238.118118] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6a33f8dd-eba8-4c78-a635-50070fcceb72 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.124520] env[69328]: DEBUG oslo_vmware.api [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Waiting for the task: (returnval){ [ 1238.124520] env[69328]: value = "task-3274354" [ 1238.124520] env[69328]: _type = "Task" [ 1238.124520] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.131625] env[69328]: DEBUG oslo_vmware.api [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Task: {'id': task-3274354, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.202191] env[69328]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1238.202191] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e13458-6786-a96e-9585-812d1f584332" [ 1238.202191] env[69328]: _type = "HttpNfcLease" [ 1238.202191] env[69328]: } is ready. {{(pid=69328) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1238.202583] env[69328]: DEBUG oslo_vmware.rw_handles [None req-3676db1a-46bf-4b77-ba16-541e87c8f7d0 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1238.202583] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52e13458-6786-a96e-9585-812d1f584332" [ 1238.202583] env[69328]: _type = "HttpNfcLease" [ 1238.202583] env[69328]: }. 
{{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1238.203213] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad00caf5-2d04-4a32-b1a4-4c9fb75e23cd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.210220] env[69328]: DEBUG oslo_vmware.rw_handles [None req-3676db1a-46bf-4b77-ba16-541e87c8f7d0 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520f0aad-0633-ddcb-43c5-869ca483ed58/disk-0.vmdk from lease info. {{(pid=69328) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1238.210391] env[69328]: DEBUG oslo_vmware.rw_handles [None req-3676db1a-46bf-4b77-ba16-541e87c8f7d0 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520f0aad-0633-ddcb-43c5-869ca483ed58/disk-0.vmdk for reading. {{(pid=69328) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1238.366325] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f1de9ce2-3687-443a-a1d3-22674c1feb39 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.637598] env[69328]: DEBUG oslo_vmware.api [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Task: {'id': task-3274354, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.135839] env[69328]: DEBUG oslo_vmware.api [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Task: {'id': task-3274354, 'name': PowerOnVM_Task, 'duration_secs': 0.552876} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.136360] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1239.139589] env[69328]: DEBUG nova.compute.manager [None req-5f083486-9587-40b9-a59a-ec64cbe0ecca tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1239.140543] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b19076e-e292-45e3-8c4e-e902095cc452 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.726714] env[69328]: DEBUG nova.compute.manager [req-83e86822-5f7c-44ef-899a-49e54fbd89f0 req-98bafa67-d7c5-4545-ab7c-c704909c7f5a service nova] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Received event network-changed-cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1240.727075] env[69328]: DEBUG nova.compute.manager [req-83e86822-5f7c-44ef-899a-49e54fbd89f0 req-98bafa67-d7c5-4545-ab7c-c704909c7f5a service nova] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Refreshing instance network info cache due to event network-changed-cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1240.727218] env[69328]: DEBUG oslo_concurrency.lockutils [req-83e86822-5f7c-44ef-899a-49e54fbd89f0 req-98bafa67-d7c5-4545-ab7c-c704909c7f5a service nova] Acquiring lock "refresh_cache-28d608b8-c06a-4e71-b3e2-94c63619cec0" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1240.727271] env[69328]: DEBUG oslo_concurrency.lockutils [req-83e86822-5f7c-44ef-899a-49e54fbd89f0 req-98bafa67-d7c5-4545-ab7c-c704909c7f5a service nova] Acquired lock "refresh_cache-28d608b8-c06a-4e71-b3e2-94c63619cec0" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1240.727412] env[69328]: DEBUG nova.network.neutron [req-83e86822-5f7c-44ef-899a-49e54fbd89f0 req-98bafa67-d7c5-4545-ab7c-c704909c7f5a service nova] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Refreshing network info cache for port cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1241.425816] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-a141a5a7-2f9b-4324-ba61-9122e3470f32 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Volume attach. 
Driver type: vmdk {{(pid=69328) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1241.426109] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-a141a5a7-2f9b-4324-ba61-9122e3470f32 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653985', 'volume_id': '01dd0cf8-98ef-4f40-b76d-352f74c03810', 'name': 'volume-01dd0cf8-98ef-4f40-b76d-352f74c03810', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd19f6a2a-3a16-4031-8c20-143ccfd6f5f5', 'attached_at': '', 'detached_at': '', 'volume_id': '01dd0cf8-98ef-4f40-b76d-352f74c03810', 'serial': '01dd0cf8-98ef-4f40-b76d-352f74c03810'} {{(pid=69328) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1241.427047] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9b00eef-c231-4543-b86f-45cd545334d9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.443496] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16be73e9-30a6-4dc3-b188-594f17f702ff {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.468164] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-a141a5a7-2f9b-4324-ba61-9122e3470f32 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Reconfiguring VM instance instance-00000078 to attach disk [datastore1] volume-01dd0cf8-98ef-4f40-b76d-352f74c03810/volume-01dd0cf8-98ef-4f40-b76d-352f74c03810.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1241.470906] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0976639b-64ef-4c4f-bfe3-87e87b95bdb7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.489597] env[69328]: DEBUG oslo_vmware.api [None req-a141a5a7-2f9b-4324-ba61-9122e3470f32 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Waiting for the task: (returnval){ [ 1241.489597] env[69328]: value = "task-3274356" [ 1241.489597] env[69328]: _type = "Task" [ 1241.489597] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.498014] env[69328]: DEBUG oslo_vmware.api [None req-a141a5a7-2f9b-4324-ba61-9122e3470f32 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274356, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.545422] env[69328]: DEBUG nova.network.neutron [req-83e86822-5f7c-44ef-899a-49e54fbd89f0 req-98bafa67-d7c5-4545-ab7c-c704909c7f5a service nova] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Updated VIF entry in instance network info cache for port cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1241.545909] env[69328]: DEBUG nova.network.neutron [req-83e86822-5f7c-44ef-899a-49e54fbd89f0 req-98bafa67-d7c5-4545-ab7c-c704909c7f5a service nova] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Updating instance_info_cache with network_info: [{"id": "cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3", "address": "fa:16:3e:38:a2:6c", "network": {"id": "cc24495f-8f1b-494e-82dc-f9af77c44b57", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1972816415-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f8074418da0e4a25bf9e2a46f08cf284", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae70d41-6ebf-472a-8504-6530eb37ea41", "external-id": "nsx-vlan-transportzone-576", "segmentation_id": 576, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf8b5fda-a9", "ovs_interfaceid": "cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1241.853730] env[69328]: DEBUG nova.compute.manager [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Stashing vm_state: active {{(pid=69328) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1241.999292] env[69328]: DEBUG oslo_vmware.api [None req-a141a5a7-2f9b-4324-ba61-9122e3470f32 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274356, 'name': ReconfigVM_Task, 'duration_secs': 0.392023} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.999589] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-a141a5a7-2f9b-4324-ba61-9122e3470f32 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Reconfigured VM instance instance-00000078 to attach disk [datastore1] volume-01dd0cf8-98ef-4f40-b76d-352f74c03810/volume-01dd0cf8-98ef-4f40-b76d-352f74c03810.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1242.005064] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e0d22bf5-d767-430c-9472-e3d5b27d9180 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.021298] env[69328]: DEBUG oslo_vmware.api [None req-a141a5a7-2f9b-4324-ba61-9122e3470f32 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Waiting for the task: (returnval){ [ 1242.021298] env[69328]: value = "task-3274357" [ 1242.021298] env[69328]: _type = "Task" [ 1242.021298] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.029493] env[69328]: DEBUG oslo_vmware.api [None req-a141a5a7-2f9b-4324-ba61-9122e3470f32 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274357, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.048598] env[69328]: DEBUG oslo_concurrency.lockutils [req-83e86822-5f7c-44ef-899a-49e54fbd89f0 req-98bafa67-d7c5-4545-ab7c-c704909c7f5a service nova] Releasing lock "refresh_cache-28d608b8-c06a-4e71-b3e2-94c63619cec0" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1242.376372] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1242.376695] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1242.532566] env[69328]: DEBUG oslo_vmware.api [None req-a141a5a7-2f9b-4324-ba61-9122e3470f32 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274357, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.761903] env[69328]: DEBUG nova.compute.manager [req-badf6f1a-6df9-44e1-90d7-3e5fec2caa74 req-482dca0c-c57a-47fe-86a2-94926dad7dcc service nova] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Received event network-changed-cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1242.761903] env[69328]: DEBUG nova.compute.manager [req-badf6f1a-6df9-44e1-90d7-3e5fec2caa74 req-482dca0c-c57a-47fe-86a2-94926dad7dcc service nova] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Refreshing instance network info cache due to event network-changed-cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1242.761998] env[69328]: DEBUG oslo_concurrency.lockutils [req-badf6f1a-6df9-44e1-90d7-3e5fec2caa74 req-482dca0c-c57a-47fe-86a2-94926dad7dcc service nova] Acquiring lock "refresh_cache-28d608b8-c06a-4e71-b3e2-94c63619cec0" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1242.762128] env[69328]: DEBUG oslo_concurrency.lockutils [req-badf6f1a-6df9-44e1-90d7-3e5fec2caa74 req-482dca0c-c57a-47fe-86a2-94926dad7dcc service nova] Acquired lock "refresh_cache-28d608b8-c06a-4e71-b3e2-94c63619cec0" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1242.762297] env[69328]: DEBUG nova.network.neutron [req-badf6f1a-6df9-44e1-90d7-3e5fec2caa74 req-482dca0c-c57a-47fe-86a2-94926dad7dcc service nova] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Refreshing network info cache for port cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1242.881607] env[69328]: INFO nova.compute.claims [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1242.905610] env[69328]: DEBUG nova.compute.manager [req-5e57e9a9-7500-4771-9745-60b1a447c11d req-0ee525e9-a6ab-4434-902d-47b64c9e56cc service nova] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Received event network-changed-cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1242.905815] env[69328]: DEBUG nova.compute.manager [req-5e57e9a9-7500-4771-9745-60b1a447c11d req-0ee525e9-a6ab-4434-902d-47b64c9e56cc service nova] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Refreshing instance network info cache due to event network-changed-cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1242.906025] env[69328]: DEBUG oslo_concurrency.lockutils [req-5e57e9a9-7500-4771-9745-60b1a447c11d req-0ee525e9-a6ab-4434-902d-47b64c9e56cc service nova] Acquiring lock "refresh_cache-28d608b8-c06a-4e71-b3e2-94c63619cec0" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1243.031689] env[69328]: DEBUG oslo_vmware.api [None req-a141a5a7-2f9b-4324-ba61-9122e3470f32 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274357, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.391151] env[69328]: INFO nova.compute.resource_tracker [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Updating resource usage from migration 0c842e7a-fde4-43bb-9a5e-0815948fd051 [ 1243.495506] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e03d429d-fd96-447e-b748-5e658c55860f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.500114] env[69328]: DEBUG nova.network.neutron [req-badf6f1a-6df9-44e1-90d7-3e5fec2caa74 req-482dca0c-c57a-47fe-86a2-94926dad7dcc service nova] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Updated VIF entry in instance network info cache for port cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1243.500449] env[69328]: DEBUG nova.network.neutron [req-badf6f1a-6df9-44e1-90d7-3e5fec2caa74 req-482dca0c-c57a-47fe-86a2-94926dad7dcc service nova] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Updating instance_info_cache with network_info: [{"id": "cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3", "address": "fa:16:3e:38:a2:6c", "network": {"id": "cc24495f-8f1b-494e-82dc-f9af77c44b57", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1972816415-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f8074418da0e4a25bf9e2a46f08cf284", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae70d41-6ebf-472a-8504-6530eb37ea41", "external-id": "nsx-vlan-transportzone-576", "segmentation_id": 576, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf8b5fda-a9", "ovs_interfaceid": "cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1243.506658] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a7f6095-8f51-4761-afda-ed16eb5c0ca2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.546508] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1aab133-c599-4f31-a500-a6a1a9fd8881 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.554775] env[69328]: DEBUG oslo_vmware.api [None req-a141a5a7-2f9b-4324-ba61-9122e3470f32 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274357, 'name': ReconfigVM_Task, 'duration_secs': 1.129714} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.557080] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-a141a5a7-2f9b-4324-ba61-9122e3470f32 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653985', 'volume_id': '01dd0cf8-98ef-4f40-b76d-352f74c03810', 'name': 'volume-01dd0cf8-98ef-4f40-b76d-352f74c03810', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd19f6a2a-3a16-4031-8c20-143ccfd6f5f5', 'attached_at': '', 'detached_at': '', 'volume_id': '01dd0cf8-98ef-4f40-b76d-352f74c03810', 'serial': '01dd0cf8-98ef-4f40-b76d-352f74c03810'} {{(pid=69328) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1243.559550] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33e09fc6-7e27-4bd5-a304-fa7011e30582 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.573831] env[69328]: DEBUG nova.compute.provider_tree [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1244.004233] env[69328]: DEBUG oslo_concurrency.lockutils [req-badf6f1a-6df9-44e1-90d7-3e5fec2caa74 req-482dca0c-c57a-47fe-86a2-94926dad7dcc service nova] Releasing lock "refresh_cache-28d608b8-c06a-4e71-b3e2-94c63619cec0" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1244.004729] env[69328]: DEBUG nova.compute.manager [req-badf6f1a-6df9-44e1-90d7-3e5fec2caa74 req-482dca0c-c57a-47fe-86a2-94926dad7dcc service nova] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Received event network-changed-cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1244.004729] env[69328]: DEBUG nova.compute.manager [req-badf6f1a-6df9-44e1-90d7-3e5fec2caa74 req-482dca0c-c57a-47fe-86a2-94926dad7dcc service nova] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Refreshing instance network info cache due to event network-changed-cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1244.004971] env[69328]: DEBUG oslo_concurrency.lockutils [req-badf6f1a-6df9-44e1-90d7-3e5fec2caa74 req-482dca0c-c57a-47fe-86a2-94926dad7dcc service nova] Acquiring lock "refresh_cache-28d608b8-c06a-4e71-b3e2-94c63619cec0" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1244.005131] env[69328]: DEBUG oslo_concurrency.lockutils [req-badf6f1a-6df9-44e1-90d7-3e5fec2caa74 req-482dca0c-c57a-47fe-86a2-94926dad7dcc service nova] Acquired lock "refresh_cache-28d608b8-c06a-4e71-b3e2-94c63619cec0" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1244.005337] env[69328]: DEBUG nova.network.neutron [req-badf6f1a-6df9-44e1-90d7-3e5fec2caa74 req-482dca0c-c57a-47fe-86a2-94926dad7dcc service nova] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Refreshing network info cache for port cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1244.077879] env[69328]: DEBUG nova.scheduler.client.report [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1244.122765] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c2d50c67-cb4b-4bdb-905b-efaa55508b8e tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Acquiring lock "28d608b8-c06a-4e71-b3e2-94c63619cec0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1244.123071] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c2d50c67-cb4b-4bdb-905b-efaa55508b8e tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Lock "28d608b8-c06a-4e71-b3e2-94c63619cec0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1244.123292] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c2d50c67-cb4b-4bdb-905b-efaa55508b8e tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Acquiring lock "28d608b8-c06a-4e71-b3e2-94c63619cec0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1244.123479] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c2d50c67-cb4b-4bdb-905b-efaa55508b8e tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Lock "28d608b8-c06a-4e71-b3e2-94c63619cec0-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1244.123644] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c2d50c67-cb4b-4bdb-905b-efaa55508b8e tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Lock "28d608b8-c06a-4e71-b3e2-94c63619cec0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1244.126129] env[69328]: INFO nova.compute.manager [None req-c2d50c67-cb4b-4bdb-905b-efaa55508b8e tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Terminating instance [ 1244.582661] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.206s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1244.582871] env[69328]: INFO nova.compute.manager [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Migrating [ 1244.598066] env[69328]: DEBUG nova.objects.instance [None req-a141a5a7-2f9b-4324-ba61-9122e3470f32 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lazy-loading 'flavor' on Instance uuid d19f6a2a-3a16-4031-8c20-143ccfd6f5f5 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1244.629409] env[69328]: DEBUG nova.compute.manager [None req-c2d50c67-cb4b-4bdb-905b-efaa55508b8e tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1244.629672] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c2d50c67-cb4b-4bdb-905b-efaa55508b8e tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1244.630783] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebb3c2e6-9a72-4226-bd9c-a7927dc65d1b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.639031] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2d50c67-cb4b-4bdb-905b-efaa55508b8e tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1244.639215] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6351b3ba-eeaa-48fa-8535-371e674c4903 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.648813] env[69328]: DEBUG oslo_vmware.api [None req-c2d50c67-cb4b-4bdb-905b-efaa55508b8e tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Waiting for the task: (returnval){ [ 1244.648813] env[69328]: value = "task-3274358" [ 1244.648813] env[69328]: _type = "Task" [ 1244.648813] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.656873] env[69328]: DEBUG oslo_vmware.api [None req-c2d50c67-cb4b-4bdb-905b-efaa55508b8e tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Task: {'id': task-3274358, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.706983] env[69328]: DEBUG nova.network.neutron [req-badf6f1a-6df9-44e1-90d7-3e5fec2caa74 req-482dca0c-c57a-47fe-86a2-94926dad7dcc service nova] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Updated VIF entry in instance network info cache for port cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1244.707408] env[69328]: DEBUG nova.network.neutron [req-badf6f1a-6df9-44e1-90d7-3e5fec2caa74 req-482dca0c-c57a-47fe-86a2-94926dad7dcc service nova] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Updating instance_info_cache with network_info: [{"id": "cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3", "address": "fa:16:3e:38:a2:6c", "network": {"id": "cc24495f-8f1b-494e-82dc-f9af77c44b57", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1972816415-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f8074418da0e4a25bf9e2a46f08cf284", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae70d41-6ebf-472a-8504-6530eb37ea41", "external-id": "nsx-vlan-transportzone-576", "segmentation_id": 576, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf8b5fda-a9", "ovs_interfaceid": "cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1245.100029] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "refresh_cache-566c3167-4cf2-4236-812f-dfbf30bbaf6f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1245.100432] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquired lock "refresh_cache-566c3167-4cf2-4236-812f-dfbf30bbaf6f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1245.100432] env[69328]: DEBUG nova.network.neutron [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1245.104921] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a141a5a7-2f9b-4324-ba61-9122e3470f32 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lock "d19f6a2a-3a16-4031-8c20-143ccfd6f5f5" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.286s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1245.159311] env[69328]: DEBUG oslo_vmware.api [None req-c2d50c67-cb4b-4bdb-905b-efaa55508b8e tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Task: {'id': task-3274358, 'name': PowerOffVM_Task, 'duration_secs': 0.459413} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.159602] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2d50c67-cb4b-4bdb-905b-efaa55508b8e tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1245.159792] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c2d50c67-cb4b-4bdb-905b-efaa55508b8e tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1245.160062] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-74c93bf3-b721-48a7-a195-ef6ecd480d34 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.210689] env[69328]: DEBUG oslo_concurrency.lockutils [req-badf6f1a-6df9-44e1-90d7-3e5fec2caa74 req-482dca0c-c57a-47fe-86a2-94926dad7dcc service nova] Releasing lock "refresh_cache-28d608b8-c06a-4e71-b3e2-94c63619cec0" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1245.211156] env[69328]: DEBUG oslo_concurrency.lockutils [req-5e57e9a9-7500-4771-9745-60b1a447c11d req-0ee525e9-a6ab-4434-902d-47b64c9e56cc service nova] Acquired lock "refresh_cache-28d608b8-c06a-4e71-b3e2-94c63619cec0" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1245.211275] env[69328]: DEBUG nova.network.neutron [req-5e57e9a9-7500-4771-9745-60b1a447c11d req-0ee525e9-a6ab-4434-902d-47b64c9e56cc service nova] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Refreshing network info cache for port cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1245.238203] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c2d50c67-cb4b-4bdb-905b-efaa55508b8e tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1245.238423] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c2d50c67-cb4b-4bdb-905b-efaa55508b8e tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1245.238691] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2d50c67-cb4b-4bdb-905b-efaa55508b8e tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Deleting the datastore file [datastore1] 28d608b8-c06a-4e71-b3e2-94c63619cec0 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1245.238980] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-001d4f6c-9119-4492-8709-cb0127701378 {{(pid=69328) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.245377] env[69328]: DEBUG oslo_vmware.api [None req-c2d50c67-cb4b-4bdb-905b-efaa55508b8e tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Waiting for the task: (returnval){ [ 1245.245377] env[69328]: value = "task-3274360" [ 1245.245377] env[69328]: _type = "Task" [ 1245.245377] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.253507] env[69328]: DEBUG oslo_vmware.api [None req-c2d50c67-cb4b-4bdb-905b-efaa55508b8e tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Task: {'id': task-3274360, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.555172] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8bbde04e-7bcb-4eac-b0b3-d2db0343ceeb tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Acquiring lock "d19f6a2a-3a16-4031-8c20-143ccfd6f5f5" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1245.555505] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8bbde04e-7bcb-4eac-b0b3-d2db0343ceeb tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lock "d19f6a2a-3a16-4031-8c20-143ccfd6f5f5" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1245.555757] env[69328]: DEBUG nova.compute.manager [None req-8bbde04e-7bcb-4eac-b0b3-d2db0343ceeb tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1245.556748] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b7a4944-0c55-4d8f-8d08-1e42ca496e9e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.564536] env[69328]: DEBUG nova.compute.manager [None req-8bbde04e-7bcb-4eac-b0b3-d2db0343ceeb tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69328) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1245.565164] env[69328]: DEBUG nova.objects.instance [None req-8bbde04e-7bcb-4eac-b0b3-d2db0343ceeb tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lazy-loading 'flavor' on Instance uuid d19f6a2a-3a16-4031-8c20-143ccfd6f5f5 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1245.756870] env[69328]: DEBUG oslo_vmware.api [None req-c2d50c67-cb4b-4bdb-905b-efaa55508b8e tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Task: {'id': task-3274360, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.322432} completed 
successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.759862] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2d50c67-cb4b-4bdb-905b-efaa55508b8e tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1245.759862] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c2d50c67-cb4b-4bdb-905b-efaa55508b8e tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1245.760050] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-c2d50c67-cb4b-4bdb-905b-efaa55508b8e tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1245.760156] env[69328]: INFO nova.compute.manager [None req-c2d50c67-cb4b-4bdb-905b-efaa55508b8e tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1245.760394] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c2d50c67-cb4b-4bdb-905b-efaa55508b8e tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1245.760587] env[69328]: DEBUG nova.compute.manager [-] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1245.760682] env[69328]: DEBUG nova.network.neutron [-] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1245.879084] env[69328]: DEBUG nova.network.neutron [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Updating instance_info_cache with network_info: [{"id": "119833be-9532-4d57-aece-6b3a83d11e9f", "address": "fa:16:3e:05:b2:63", "network": {"id": "4031def8-0553-461f-9528-aa338b9d7b2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1834835647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f357b5a9494b4849a83aa934c5d4e26b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "357d2811-e990-4985-9f9e-b158d10d3699", "external-id": "nsx-vlan-transportzone-641", "segmentation_id": 641, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap119833be-95", "ovs_interfaceid": "119833be-9532-4d57-aece-6b3a83d11e9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1245.934695] env[69328]: DEBUG nova.network.neutron [req-5e57e9a9-7500-4771-9745-60b1a447c11d req-0ee525e9-a6ab-4434-902d-47b64c9e56cc service nova] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Updated VIF entry in instance network info cache for port cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3. 
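The instance_info_cache entry dumped above is the nested network_info structure Nova caches per VIF: each port carries its network, subnets, fixed IPs and any associated floating IPs. A minimal illustrative Python sketch of walking that structure (the helper name and the abbreviated literal below are not from the log, only the shape it shows):

# Illustrative only: summarize a Nova network_info cache entry of the shape
# dumped above (a list of VIF dicts with nested subnets/ips/floating_ips).
def summarize_network_info(network_info):
    for vif in network_info:
        port_id = vif.get("id")
        for subnet in vif.get("network", {}).get("subnets", []):
            for ip in subnet.get("ips", []):
                floats = [f["address"] for f in ip.get("floating_ips", [])]
                line = f"port {port_id}: fixed {ip['address']}"
                if floats:
                    line += " floating " + ", ".join(floats)
                print(line)

# Shape taken from the cache dump above (values abbreviated):
summarize_network_info([{
    "id": "119833be-9532-4d57-aece-6b3a83d11e9f",
    "network": {"subnets": [{"ips": [{"address": "192.168.128.6",
                                      "floating_ips": [{"address": "10.180.180.196"}]}]}]},
}])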
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1245.934695] env[69328]: DEBUG nova.network.neutron [req-5e57e9a9-7500-4771-9745-60b1a447c11d req-0ee525e9-a6ab-4434-902d-47b64c9e56cc service nova] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Updating instance_info_cache with network_info: [{"id": "cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3", "address": "fa:16:3e:38:a2:6c", "network": {"id": "cc24495f-8f1b-494e-82dc-f9af77c44b57", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1972816415-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f8074418da0e4a25bf9e2a46f08cf284", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cae70d41-6ebf-472a-8504-6530eb37ea41", "external-id": "nsx-vlan-transportzone-576", "segmentation_id": 576, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf8b5fda-a9", "ovs_interfaceid": "cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1246.033756] env[69328]: DEBUG nova.compute.manager [req-9b2043ef-0dd0-4b71-a040-dc58b5c81a5b req-6a3aed91-de06-4bd6-aeb0-258a6e8e7596 service nova] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Received event network-vif-deleted-cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1246.033991] env[69328]: INFO nova.compute.manager [req-9b2043ef-0dd0-4b71-a040-dc58b5c81a5b req-6a3aed91-de06-4bd6-aeb0-258a6e8e7596 service nova] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Neutron deleted interface cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3; detaching it from the instance and deleting it from the info cache [ 1246.034981] env[69328]: DEBUG nova.network.neutron [req-9b2043ef-0dd0-4b71-a040-dc58b5c81a5b req-6a3aed91-de06-4bd6-aeb0-258a6e8e7596 service nova] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1246.384330] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Releasing lock "refresh_cache-566c3167-4cf2-4236-812f-dfbf30bbaf6f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1246.437470] env[69328]: DEBUG oslo_concurrency.lockutils [req-5e57e9a9-7500-4771-9745-60b1a447c11d req-0ee525e9-a6ab-4434-902d-47b64c9e56cc service nova] Releasing lock "refresh_cache-28d608b8-c06a-4e71-b3e2-94c63619cec0" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1246.517791] env[69328]: DEBUG nova.network.neutron [-] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1246.539087] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aedfc8de-d768-41be-8099-a9d997828f06 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.549375] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d33ae4f9-0114-4d49-a2f9-4c2291e89c7b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.578985] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bbde04e-7bcb-4eac-b0b3-d2db0343ceeb tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1246.579403] env[69328]: DEBUG nova.compute.manager [req-9b2043ef-0dd0-4b71-a040-dc58b5c81a5b req-6a3aed91-de06-4bd6-aeb0-258a6e8e7596 service nova] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Detach interface failed, port_id=cf8b5fda-a9e2-4224-a9f4-4cc8b0dc8dc3, reason: Instance 28d608b8-c06a-4e71-b3e2-94c63619cec0 could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1246.579796] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0fe6b073-7787-48a6-ae47-9256bfecd323 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.587032] env[69328]: DEBUG oslo_vmware.api [None req-8bbde04e-7bcb-4eac-b0b3-d2db0343ceeb tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Waiting for the task: (returnval){ [ 1246.587032] env[69328]: value = "task-3274361" [ 1246.587032] env[69328]: _type = "Task" [ 1246.587032] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.597618] env[69328]: DEBUG oslo_vmware.api [None req-8bbde04e-7bcb-4eac-b0b3-d2db0343ceeb tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274361, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.020470] env[69328]: INFO nova.compute.manager [-] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Took 1.26 seconds to deallocate network for instance. [ 1247.097032] env[69328]: DEBUG oslo_vmware.api [None req-8bbde04e-7bcb-4eac-b0b3-d2db0343ceeb tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274361, 'name': PowerOffVM_Task, 'duration_secs': 0.235493} completed successfully. 
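Task-3274361 above is a PowerOffVM_Task that nova.virt.vmwareapi issues and then polls through oslo.vmware until it reports "completed successfully"; the surrounding RetrievePropertiesEx calls are the "Checking state" reads of runtime.powerState. A hedged sketch of that pattern using oslo.vmware directly, assuming an already-created VMwareAPISession `session` and a VirtualMachine moref `vm_ref` (both illustrative names):

# Sketch, not Nova's code: power a VM off and re-read its power state with
# oslo.vmware primitives.
from oslo_vmware import vim_util

def power_off_and_check(session, vm_ref):
    # PowerOffVM_Task returns a Task moref; wait_for_task polls it (the
    # "Task: {'id': task-...} progress is N%" lines above) and raises on error.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)
    # Equivalent in spirit to the "Checking state" property reads above.
    return session.invoke_api(vim_util, 'get_object_property',
                              session.vim, vm_ref, 'runtime.powerState')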
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.097345] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bbde04e-7bcb-4eac-b0b3-d2db0343ceeb tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1247.097474] env[69328]: DEBUG nova.compute.manager [None req-8bbde04e-7bcb-4eac-b0b3-d2db0343ceeb tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1247.098534] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce7faaa6-5c41-4f42-831d-d363a5b0be8a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.213480] env[69328]: DEBUG oslo_vmware.rw_handles [None req-3676db1a-46bf-4b77-ba16-541e87c8f7d0 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520f0aad-0633-ddcb-43c5-869ca483ed58/disk-0.vmdk. {{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1247.214420] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29b8e991-4aa6-4fd7-bc01-3174fed680d9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.220677] env[69328]: DEBUG oslo_vmware.rw_handles [None req-3676db1a-46bf-4b77-ba16-541e87c8f7d0 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520f0aad-0633-ddcb-43c5-869ca483ed58/disk-0.vmdk is in state: ready. {{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1247.220862] env[69328]: ERROR oslo_vmware.rw_handles [None req-3676db1a-46bf-4b77-ba16-541e87c8f7d0 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520f0aad-0633-ddcb-43c5-869ca483ed58/disk-0.vmdk due to incomplete transfer. [ 1247.221095] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-459cfe43-99e0-4cde-aa0a-542722dcadbd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.228501] env[69328]: DEBUG oslo_vmware.rw_handles [None req-3676db1a-46bf-4b77-ba16-541e87c8f7d0 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520f0aad-0633-ddcb-43c5-869ca483ed58/disk-0.vmdk. 
{{(pid=69328) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1247.228691] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-3676db1a-46bf-4b77-ba16-541e87c8f7d0 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Uploaded image 4b4a918b-2d0f-48d1-9439-8f2a9216b87d to the Glance image server {{(pid=69328) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1247.230951] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-3676db1a-46bf-4b77-ba16-541e87c8f7d0 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Destroying the VM {{(pid=69328) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1247.231339] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-1c8ad4f7-6231-45cb-b9c1-bb2765a71bec {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.237415] env[69328]: DEBUG oslo_vmware.api [None req-3676db1a-46bf-4b77-ba16-541e87c8f7d0 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1247.237415] env[69328]: value = "task-3274362" [ 1247.237415] env[69328]: _type = "Task" [ 1247.237415] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.244999] env[69328]: DEBUG oslo_vmware.api [None req-3676db1a-46bf-4b77-ba16-541e87c8f7d0 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274362, 'name': Destroy_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.529020] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c2d50c67-cb4b-4bdb-905b-efaa55508b8e tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1247.529020] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c2d50c67-cb4b-4bdb-905b-efaa55508b8e tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1247.529383] env[69328]: DEBUG nova.objects.instance [None req-c2d50c67-cb4b-4bdb-905b-efaa55508b8e tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Lazy-loading 'resources' on Instance uuid 28d608b8-c06a-4e71-b3e2-94c63619cec0 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1247.610879] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8bbde04e-7bcb-4eac-b0b3-d2db0343ceeb tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lock "d19f6a2a-3a16-4031-8c20-143ccfd6f5f5" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.055s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1247.748105] env[69328]: DEBUG oslo_vmware.api [None req-3676db1a-46bf-4b77-ba16-541e87c8f7d0 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274362, 'name': Destroy_Task, 'duration_secs': 0.315661} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.748451] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-3676db1a-46bf-4b77-ba16-541e87c8f7d0 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Destroyed the VM [ 1247.748682] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3676db1a-46bf-4b77-ba16-541e87c8f7d0 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Deleting Snapshot of the VM instance {{(pid=69328) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1247.748938] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c8283a4c-394a-4bca-97b9-42228a0169fa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.757134] env[69328]: DEBUG oslo_vmware.api [None req-3676db1a-46bf-4b77-ba16-541e87c8f7d0 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1247.757134] env[69328]: value = "task-3274363" [ 1247.757134] env[69328]: _type = "Task" [ 1247.757134] env[69328]: } to complete. 
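The "Acquiring lock ... / Lock ... acquired ... waited / ... released ... held" triplets above (the per-instance locks, "compute_resources", the refresh_cache-* locks) are emitted by oslo.concurrency's lock helpers, which log wait and hold times around the wrapped call. A minimal sketch of the same pattern; the lock names and function bodies are illustrative:

# Sketch of the oslo.concurrency locking pattern behind the lock messages above.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage_example():
    # Runs with the in-process "compute_resources" lock held; the decorator
    # logs how long the caller waited and how long the lock was held.
    pass

def stop_instance_example(instance_uuid):
    # The same idea as a context manager, keyed per instance.
    with lockutils.lock(instance_uuid):
        pass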
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.765262] env[69328]: DEBUG oslo_vmware.api [None req-3676db1a-46bf-4b77-ba16-541e87c8f7d0 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274363, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.901973] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5596d6c-49ca-4b6b-a698-1270c8858d65 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.920342] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Updating instance '566c3167-4cf2-4236-812f-dfbf30bbaf6f' progress to 0 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1247.962829] env[69328]: DEBUG nova.objects.instance [None req-db22325d-0c7e-4c64-975e-3b04953a4a7d tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lazy-loading 'flavor' on Instance uuid d19f6a2a-3a16-4031-8c20-143ccfd6f5f5 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1248.123072] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-333134c6-5888-422f-a2e6-fd5373426199 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.130598] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70bd28ba-aff5-4d5c-8cd1-7826c3b3a082 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.161340] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7be41ad-763c-41df-9859-ba4d1a13bbc2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.168190] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81cd2c6b-3937-4bf2-ab19-7996ef93aadb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.181094] env[69328]: DEBUG nova.compute.provider_tree [None req-c2d50c67-cb4b-4bdb-905b-efaa55508b8e tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1248.268102] env[69328]: DEBUG oslo_vmware.api [None req-3676db1a-46bf-4b77-ba16-541e87c8f7d0 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274363, 'name': RemoveSnapshot_Task, 'duration_secs': 0.511332} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.268382] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-3676db1a-46bf-4b77-ba16-541e87c8f7d0 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Deleted Snapshot of the VM instance {{(pid=69328) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1248.268611] env[69328]: INFO nova.compute.manager [None req-3676db1a-46bf-4b77-ba16-541e87c8f7d0 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Took 13.70 seconds to snapshot the instance on the hypervisor. [ 1248.426242] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1248.426843] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4f4770d6-05fe-4fb1-a4f3-f10cb3984763 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.434587] env[69328]: DEBUG oslo_vmware.api [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1248.434587] env[69328]: value = "task-3274364" [ 1248.434587] env[69328]: _type = "Task" [ 1248.434587] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.442337] env[69328]: DEBUG oslo_vmware.api [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274364, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.468465] env[69328]: DEBUG oslo_concurrency.lockutils [None req-db22325d-0c7e-4c64-975e-3b04953a4a7d tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Acquiring lock "refresh_cache-d19f6a2a-3a16-4031-8c20-143ccfd6f5f5" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1248.468631] env[69328]: DEBUG oslo_concurrency.lockutils [None req-db22325d-0c7e-4c64-975e-3b04953a4a7d tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Acquired lock "refresh_cache-d19f6a2a-3a16-4031-8c20-143ccfd6f5f5" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1248.468793] env[69328]: DEBUG nova.network.neutron [None req-db22325d-0c7e-4c64-975e-3b04953a4a7d tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1248.468967] env[69328]: DEBUG nova.objects.instance [None req-db22325d-0c7e-4c64-975e-3b04953a4a7d tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lazy-loading 'info_cache' on Instance uuid d19f6a2a-3a16-4031-8c20-143ccfd6f5f5 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1248.686140] env[69328]: DEBUG nova.scheduler.client.report [None req-c2d50c67-cb4b-4bdb-905b-efaa55508b8e tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1248.842263] env[69328]: DEBUG nova.compute.manager [None req-3676db1a-46bf-4b77-ba16-541e87c8f7d0 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Found 2 images (rotation: 2) {{(pid=69328) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1248.943993] env[69328]: DEBUG oslo_vmware.api [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274364, 'name': PowerOffVM_Task, 'duration_secs': 0.208961} completed successfully. 
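The inventory payload above is what the resource tracker reports to Placement for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e; the schedulable capacity of each class is, assuming the usual Placement rule, (total - reserved) * allocation_ratio. A small check against the logged numbers:

# Capacity derived from the inventory dict logged above (assumed formula:
# (total - reserved) * allocation_ratio).
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    print(rc, (inv['total'] - inv['reserved']) * inv['allocation_ratio'])
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0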
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.944426] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1248.944736] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Updating instance '566c3167-4cf2-4236-812f-dfbf30bbaf6f' progress to 17 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1248.971771] env[69328]: DEBUG nova.objects.base [None req-db22325d-0c7e-4c64-975e-3b04953a4a7d tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=69328) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1249.191511] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c2d50c67-cb4b-4bdb-905b-efaa55508b8e tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.662s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1249.211600] env[69328]: INFO nova.scheduler.client.report [None req-c2d50c67-cb4b-4bdb-905b-efaa55508b8e tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Deleted allocations for instance 28d608b8-c06a-4e71-b3e2-94c63619cec0 [ 1249.451339] env[69328]: DEBUG nova.virt.hardware [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:34:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1249.451564] env[69328]: DEBUG nova.virt.hardware [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1249.451702] env[69328]: DEBUG nova.virt.hardware [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1249.451885] env[69328]: DEBUG nova.virt.hardware [None req-cc32aef2-7547-4205-8846-27219026032f 
tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1249.452041] env[69328]: DEBUG nova.virt.hardware [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1249.452196] env[69328]: DEBUG nova.virt.hardware [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1249.452398] env[69328]: DEBUG nova.virt.hardware [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1249.452558] env[69328]: DEBUG nova.virt.hardware [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1249.452725] env[69328]: DEBUG nova.virt.hardware [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1249.452887] env[69328]: DEBUG nova.virt.hardware [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1249.453076] env[69328]: DEBUG nova.virt.hardware [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1249.458198] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-75377263-4c8a-43d3-8128-137947090ea9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.476772] env[69328]: DEBUG oslo_vmware.api [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1249.476772] env[69328]: value = "task-3274365" [ 1249.476772] env[69328]: _type = "Task" [ 1249.476772] env[69328]: } to complete. 
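The nova.virt.hardware trace above resolves a 1-vCPU flavor against wide-open limits (sockets=cores=threads=65536) and finds exactly one possible topology, 1:1:1. A toy illustration of that enumeration idea, not Nova's implementation: list the sockets*cores*threads factorizations of the vCPU count that fit the limits.

# Toy illustration of the topology search traced above (NOT nova.virt.hardware).
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topos = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                topos.append((sockets, cores, threads))
    return topos

print(possible_topologies(1))  # [(1, 1, 1)] -- matches "Got 1 possible topologies"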
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.484823] env[69328]: DEBUG oslo_vmware.api [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274365, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.523318] env[69328]: DEBUG nova.compute.manager [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1249.524572] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f2dd003-f9fc-485e-ae99-a1bbe3bb1a62 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.719561] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c2d50c67-cb4b-4bdb-905b-efaa55508b8e tempest-ServerRescueTestJSONUnderV235-1308637535 tempest-ServerRescueTestJSONUnderV235-1308637535-project-member] Lock "28d608b8-c06a-4e71-b3e2-94c63619cec0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.596s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1249.721164] env[69328]: DEBUG nova.network.neutron [None req-db22325d-0c7e-4c64-975e-3b04953a4a7d tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Updating instance_info_cache with network_info: [{"id": "69e73394-845a-4108-8b2f-6b23a000d98c", "address": "fa:16:3e:e9:f3:15", "network": {"id": "6d64fb31-1957-4722-a4b3-46b946bfb65a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1232247602-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "278be2f8452946b9ab9c4bce8f9a7557", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5bd281ed-ae39-485f-90ee-4ee27994b5b0", "external-id": "nsx-vlan-transportzone-305", "segmentation_id": 305, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69e73394-84", "ovs_interfaceid": "69e73394-845a-4108-8b2f-6b23a000d98c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1249.987073] env[69328]: DEBUG oslo_vmware.api [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274365, 'name': ReconfigVM_Task, 'duration_secs': 0.17765} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.987379] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Updating instance '566c3167-4cf2-4236-812f-dfbf30bbaf6f' progress to 33 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1250.035120] env[69328]: INFO nova.compute.manager [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] instance snapshotting [ 1250.035742] env[69328]: DEBUG nova.objects.instance [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lazy-loading 'flavor' on Instance uuid 0cf68559-5f07-4006-9f7f-59027e31635d {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1250.223182] env[69328]: DEBUG oslo_concurrency.lockutils [None req-db22325d-0c7e-4c64-975e-3b04953a4a7d tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Releasing lock "refresh_cache-d19f6a2a-3a16-4031-8c20-143ccfd6f5f5" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1250.493772] env[69328]: DEBUG nova.virt.hardware [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1250.494027] env[69328]: DEBUG nova.virt.hardware [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1250.494195] env[69328]: DEBUG nova.virt.hardware [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1250.494382] env[69328]: DEBUG nova.virt.hardware [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1250.494527] env[69328]: DEBUG nova.virt.hardware [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Image pref 
0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1250.494676] env[69328]: DEBUG nova.virt.hardware [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1250.494925] env[69328]: DEBUG nova.virt.hardware [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1250.495146] env[69328]: DEBUG nova.virt.hardware [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1250.495343] env[69328]: DEBUG nova.virt.hardware [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1250.495514] env[69328]: DEBUG nova.virt.hardware [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1250.495735] env[69328]: DEBUG nova.virt.hardware [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1250.502821] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Reconfiguring VM instance instance-00000079 to detach disk 2000 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1250.503122] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-85ba7579-ea19-420d-9cfd-b9a0fb8e9eb1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.523648] env[69328]: DEBUG oslo_vmware.api [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1250.523648] env[69328]: value = "task-3274366" [ 1250.523648] env[69328]: _type = "Task" [ 1250.523648] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.534674] env[69328]: DEBUG oslo_vmware.api [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274366, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.540906] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43ebb979-6496-4550-8c81-2f2d09f085c1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.566230] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a971e156-e7f5-47e4-9647-dda05064508c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.033375] env[69328]: DEBUG oslo_vmware.api [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274366, 'name': ReconfigVM_Task, 'duration_secs': 0.172879} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.033666] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Reconfigured VM instance instance-00000079 to detach disk 2000 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1251.034456] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65521206-1f19-4ae1-b406-5ca2e9839e08 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.056036] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] 566c3167-4cf2-4236-812f-dfbf30bbaf6f/566c3167-4cf2-4236-812f-dfbf30bbaf6f.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1251.056298] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-da1eeebf-27fa-473d-9948-f1c380a73fe4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.074389] env[69328]: DEBUG oslo_vmware.api [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1251.074389] env[69328]: value = "task-3274367" [ 1251.074389] env[69328]: _type = "Task" [ 1251.074389] env[69328]: } to complete. 
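Tasks task-3274366 and task-3274367 above are ReconfigVM_Task calls: the first detaches disk 2000, the second re-attaches the instance's root VMDK with a thin backing as the resize progresses. A simplified, hedged sketch of driving such a reconfigure through oslo.vmware; `session`, `vm_ref` and `disk_device` are assumed inputs and the spec building is reduced to the essentials:

# Simplified sketch of a ReconfigVM_Task that removes a virtual disk, along the
# lines of the "Reconfiguring VM instance ... to detach disk 2000" step above.
def detach_disk(session, vm_ref, disk_device):
    factory = session.vim.client.factory
    config_spec = factory.create('ns0:VirtualMachineConfigSpec')
    device_spec = factory.create('ns0:VirtualDeviceConfigSpec')
    device_spec.operation = 'remove'   # detach only; no fileOperation, so the
    device_spec.device = disk_device   # backing VMDK stays on the datastore
    config_spec.deviceChange = [device_spec]
    task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref,
                              spec=config_spec)
    session.wait_for_task(task)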
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.079701] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Creating Snapshot of the VM instance {{(pid=69328) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1251.079959] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e93c43d1-4b3c-4b93-b59c-c546017940f8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.087621] env[69328]: DEBUG oslo_vmware.api [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274367, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.088883] env[69328]: DEBUG oslo_vmware.api [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1251.088883] env[69328]: value = "task-3274368" [ 1251.088883] env[69328]: _type = "Task" [ 1251.088883] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.096569] env[69328]: DEBUG oslo_vmware.api [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274368, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.232712] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-db22325d-0c7e-4c64-975e-3b04953a4a7d tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1251.233043] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-828543ed-4a42-484c-bfb8-6e9cb9f03e1a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.241809] env[69328]: DEBUG oslo_vmware.api [None req-db22325d-0c7e-4c64-975e-3b04953a4a7d tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Waiting for the task: (returnval){ [ 1251.241809] env[69328]: value = "task-3274369" [ 1251.241809] env[69328]: _type = "Task" [ 1251.241809] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.257313] env[69328]: DEBUG oslo_vmware.api [None req-db22325d-0c7e-4c64-975e-3b04953a4a7d tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274369, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.584273] env[69328]: DEBUG oslo_vmware.api [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274367, 'name': ReconfigVM_Task, 'duration_secs': 0.260244} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.584600] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Reconfigured VM instance instance-00000079 to attach disk [datastore1] 566c3167-4cf2-4236-812f-dfbf30bbaf6f/566c3167-4cf2-4236-812f-dfbf30bbaf6f.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1251.584883] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Updating instance '566c3167-4cf2-4236-812f-dfbf30bbaf6f' progress to 50 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1251.598664] env[69328]: DEBUG oslo_vmware.api [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274368, 'name': CreateSnapshot_Task, 'duration_secs': 0.401843} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.598880] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Created Snapshot of the VM instance {{(pid=69328) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1251.599640] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a27cd4c-7f0d-4f0e-a024-7821e10229be {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.752997] env[69328]: DEBUG oslo_vmware.api [None req-db22325d-0c7e-4c64-975e-3b04953a4a7d tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274369, 'name': PowerOnVM_Task, 'duration_secs': 0.373617} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.753337] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-db22325d-0c7e-4c64-975e-3b04953a4a7d tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1251.753611] env[69328]: DEBUG nova.compute.manager [None req-db22325d-0c7e-4c64-975e-3b04953a4a7d tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1251.754440] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-919c4852-e52e-4d9c-9603-49b2aff44068 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.092110] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efb7aa9f-c12f-430a-a137-379535a57ade {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.116581] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Creating linked-clone VM from snapshot {{(pid=69328) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1252.116913] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-8a68cc65-4a81-4d39-9da1-e5037fd99710 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.120058] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9ff7a5a-bce4-41f7-af8c-716e87d782ac {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.137362] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Updating instance '566c3167-4cf2-4236-812f-dfbf30bbaf6f' progress to 67 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1252.141460] env[69328]: DEBUG oslo_vmware.api [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1252.141460] env[69328]: value = "task-3274370" [ 1252.141460] env[69328]: _type = "Task" [ 1252.141460] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.148831] env[69328]: DEBUG oslo_vmware.api [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274370, 'name': CloneVM_Task} progress is 10%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.655340] env[69328]: DEBUG oslo_vmware.api [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274370, 'name': CloneVM_Task} progress is 94%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.682764] env[69328]: DEBUG nova.network.neutron [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Port 119833be-9532-4d57-aece-6b3a83d11e9f binding to destination host cpu-1 is already ACTIVE {{(pid=69328) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1253.153139] env[69328]: DEBUG oslo_vmware.api [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274370, 'name': CloneVM_Task} progress is 95%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.653988] env[69328]: DEBUG oslo_vmware.api [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274370, 'name': CloneVM_Task, 'duration_secs': 1.116981} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.654296] env[69328]: INFO nova.virt.vmwareapi.vmops [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Created linked-clone VM from snapshot [ 1253.655073] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b645571-842e-4599-b68e-0e41a31e12f1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.662336] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Uploading image bf8d4f72-f35a-47f2-bc6e-496f04308a62 {{(pid=69328) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1253.689195] env[69328]: DEBUG oslo_vmware.rw_handles [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1253.689195] env[69328]: value = "vm-653987" [ 1253.689195] env[69328]: _type = "VirtualMachine" [ 1253.689195] env[69328]: }. 
{{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1253.689522] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-7d840b28-a3bf-465e-82c8-47882b310485 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.711024] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "566c3167-4cf2-4236-812f-dfbf30bbaf6f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1253.711024] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "566c3167-4cf2-4236-812f-dfbf30bbaf6f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1253.711024] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "566c3167-4cf2-4236-812f-dfbf30bbaf6f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1253.711296] env[69328]: DEBUG oslo_vmware.rw_handles [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lease: (returnval){ [ 1253.711296] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5254f561-b807-1ba2-1fd3-5cea1067deaf" [ 1253.711296] env[69328]: _type = "HttpNfcLease" [ 1253.711296] env[69328]: } obtained for exporting VM: (result){ [ 1253.711296] env[69328]: value = "vm-653987" [ 1253.711296] env[69328]: _type = "VirtualMachine" [ 1253.711296] env[69328]: }. {{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1253.711566] env[69328]: DEBUG oslo_vmware.api [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the lease: (returnval){ [ 1253.711566] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5254f561-b807-1ba2-1fd3-5cea1067deaf" [ 1253.711566] env[69328]: _type = "HttpNfcLease" [ 1253.711566] env[69328]: } to be ready. {{(pid=69328) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1253.719101] env[69328]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1253.719101] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5254f561-b807-1ba2-1fd3-5cea1067deaf" [ 1253.719101] env[69328]: _type = "HttpNfcLease" [ 1253.719101] env[69328]: } is ready. 
{{(pid=69328) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1253.719912] env[69328]: DEBUG oslo_vmware.rw_handles [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1253.719912] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5254f561-b807-1ba2-1fd3-5cea1067deaf" [ 1253.719912] env[69328]: _type = "HttpNfcLease" [ 1253.719912] env[69328]: }. {{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1253.720759] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-822d8f20-0e94-4490-a4f6-de01797239ed {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.729756] env[69328]: DEBUG oslo_vmware.rw_handles [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525f16e4-9fbf-3374-bf50-4ff1e82ba746/disk-0.vmdk from lease info. {{(pid=69328) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1253.729955] env[69328]: DEBUG oslo_vmware.rw_handles [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525f16e4-9fbf-3374-bf50-4ff1e82ba746/disk-0.vmdk for reading. {{(pid=69328) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1253.818870] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-1209535b-e752-4940-9a68-a6c1b549e521 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.743028] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "refresh_cache-566c3167-4cf2-4236-812f-dfbf30bbaf6f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1254.743325] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquired lock "refresh_cache-566c3167-4cf2-4236-812f-dfbf30bbaf6f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1254.743785] env[69328]: DEBUG nova.network.neutron [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1255.450940] env[69328]: DEBUG nova.network.neutron [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Updating instance_info_cache with network_info: [{"id": 
"119833be-9532-4d57-aece-6b3a83d11e9f", "address": "fa:16:3e:05:b2:63", "network": {"id": "4031def8-0553-461f-9528-aa338b9d7b2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1834835647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f357b5a9494b4849a83aa934c5d4e26b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "357d2811-e990-4985-9f9e-b158d10d3699", "external-id": "nsx-vlan-transportzone-641", "segmentation_id": 641, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap119833be-95", "ovs_interfaceid": "119833be-9532-4d57-aece-6b3a83d11e9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1255.954636] env[69328]: DEBUG oslo_concurrency.lockutils [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Releasing lock "refresh_cache-566c3167-4cf2-4236-812f-dfbf30bbaf6f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1256.480321] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a8c6702-88ab-476f-8962-21b31e3df5aa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.499651] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a6523c6-85b4-4c47-aec0-5da3370c364b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.506781] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Updating instance '566c3167-4cf2-4236-812f-dfbf30bbaf6f' progress to 83 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1257.014080] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1257.014513] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ab276af8-ff20-405d-92dd-17787fd992cf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.022239] env[69328]: DEBUG oslo_vmware.api [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1257.022239] 
env[69328]: value = "task-3274372" [ 1257.022239] env[69328]: _type = "Task" [ 1257.022239] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.031145] env[69328]: DEBUG oslo_vmware.api [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274372, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.532777] env[69328]: DEBUG oslo_vmware.api [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274372, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.032448] env[69328]: DEBUG oslo_vmware.api [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274372, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.533546] env[69328]: DEBUG oslo_vmware.api [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274372, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.034687] env[69328]: DEBUG oslo_vmware.api [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274372, 'name': PowerOnVM_Task, 'duration_secs': 1.962455} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1259.035069] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1259.035069] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-cc32aef2-7547-4205-8846-27219026032f tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Updating instance '566c3167-4cf2-4236-812f-dfbf30bbaf6f' progress to 100 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1259.985779] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b84cb5b2-8901-4684-b270-8bae284c4c9e tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquiring lock "ae46c18e-15ae-4a47-b05a-a143f10b5ab6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1259.986098] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b84cb5b2-8901-4684-b270-8bae284c4c9e tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lock "ae46c18e-15ae-4a47-b05a-a143f10b5ab6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1259.986317] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b84cb5b2-8901-4684-b270-8bae284c4c9e tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquiring lock "ae46c18e-15ae-4a47-b05a-a143f10b5ab6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1259.986437] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b84cb5b2-8901-4684-b270-8bae284c4c9e tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lock "ae46c18e-15ae-4a47-b05a-a143f10b5ab6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1259.986609] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b84cb5b2-8901-4684-b270-8bae284c4c9e tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lock "ae46c18e-15ae-4a47-b05a-a143f10b5ab6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1259.988840] env[69328]: INFO nova.compute.manager [None req-b84cb5b2-8901-4684-b270-8bae284c4c9e tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Terminating instance [ 1260.492542] env[69328]: DEBUG 
nova.compute.manager [None req-b84cb5b2-8901-4684-b270-8bae284c4c9e tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1260.492937] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b84cb5b2-8901-4684-b270-8bae284c4c9e tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1260.493672] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c041d8a5-c5c2-4f64-be71-3c1d3009b66e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.502248] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-b84cb5b2-8901-4684-b270-8bae284c4c9e tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1260.502743] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2d6d7e35-f1a0-4acc-86b5-9de2c581eb41 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.510918] env[69328]: DEBUG oslo_vmware.api [None req-b84cb5b2-8901-4684-b270-8bae284c4c9e tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 1260.510918] env[69328]: value = "task-3274373" [ 1260.510918] env[69328]: _type = "Task" [ 1260.510918] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.519517] env[69328]: DEBUG oslo_vmware.api [None req-b84cb5b2-8901-4684-b270-8bae284c4c9e tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274373, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.021518] env[69328]: DEBUG oslo_vmware.api [None req-b84cb5b2-8901-4684-b270-8bae284c4c9e tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274373, 'name': PowerOffVM_Task, 'duration_secs': 0.21337} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.021854] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-b84cb5b2-8901-4684-b270-8bae284c4c9e tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1261.022074] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b84cb5b2-8901-4684-b270-8bae284c4c9e tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1261.022393] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7fda8019-4b41-4e44-9400-cf1844451483 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.089927] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b84cb5b2-8901-4684-b270-8bae284c4c9e tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1261.090179] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b84cb5b2-8901-4684-b270-8bae284c4c9e tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1261.090346] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-b84cb5b2-8901-4684-b270-8bae284c4c9e tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Deleting the datastore file [datastore2] ae46c18e-15ae-4a47-b05a-a143f10b5ab6 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1261.090613] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-466d20aa-be54-4cd3-a2a2-bf860a4459a9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.097697] env[69328]: DEBUG oslo_vmware.api [None req-b84cb5b2-8901-4684-b270-8bae284c4c9e tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for the task: (returnval){ [ 1261.097697] env[69328]: value = "task-3274375" [ 1261.097697] env[69328]: _type = "Task" [ 1261.097697] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1261.105839] env[69328]: DEBUG oslo_vmware.api [None req-b84cb5b2-8901-4684-b270-8bae284c4c9e tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274375, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.355224] env[69328]: DEBUG oslo_vmware.rw_handles [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525f16e4-9fbf-3374-bf50-4ff1e82ba746/disk-0.vmdk. {{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1261.358466] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4787584a-635a-48f6-8e65-22f3bdbf82d0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.363480] env[69328]: DEBUG oslo_vmware.rw_handles [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525f16e4-9fbf-3374-bf50-4ff1e82ba746/disk-0.vmdk is in state: ready. {{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1261.363677] env[69328]: ERROR oslo_vmware.rw_handles [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525f16e4-9fbf-3374-bf50-4ff1e82ba746/disk-0.vmdk due to incomplete transfer. [ 1261.363910] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-99d77454-ab53-4242-8b9c-5e9fcc2f0711 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.371490] env[69328]: DEBUG oslo_vmware.rw_handles [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525f16e4-9fbf-3374-bf50-4ff1e82ba746/disk-0.vmdk. 
{{(pid=69328) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1261.371668] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Uploaded image bf8d4f72-f35a-47f2-bc6e-496f04308a62 to the Glance image server {{(pid=69328) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1261.373998] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Destroying the VM {{(pid=69328) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1261.374250] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-4f6f3653-d32b-4ef8-bd52-f78e1d93170d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.381312] env[69328]: DEBUG oslo_vmware.api [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1261.381312] env[69328]: value = "task-3274376" [ 1261.381312] env[69328]: _type = "Task" [ 1261.381312] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1261.389624] env[69328]: DEBUG oslo_vmware.api [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274376, 'name': Destroy_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.412080] env[69328]: DEBUG nova.network.neutron [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Port 119833be-9532-4d57-aece-6b3a83d11e9f binding to destination host cpu-1 is already ACTIVE {{(pid=69328) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1261.412420] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "refresh_cache-566c3167-4cf2-4236-812f-dfbf30bbaf6f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1261.412589] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquired lock "refresh_cache-566c3167-4cf2-4236-812f-dfbf30bbaf6f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1261.412757] env[69328]: DEBUG nova.network.neutron [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1261.608724] env[69328]: DEBUG oslo_vmware.api [None req-b84cb5b2-8901-4684-b270-8bae284c4c9e tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Task: {'id': task-3274375, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.202895} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.609149] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-b84cb5b2-8901-4684-b270-8bae284c4c9e tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1261.609149] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b84cb5b2-8901-4684-b270-8bae284c4c9e tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1261.609340] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-b84cb5b2-8901-4684-b270-8bae284c4c9e tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1261.609472] env[69328]: INFO nova.compute.manager [None req-b84cb5b2-8901-4684-b270-8bae284c4c9e tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Took 1.12 seconds to destroy the instance on the hypervisor. 
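The destroy sequence recorded above (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task, each followed by "Waiting for the task ... to complete" and repeated "_poll_task ... progress is N%" entries) is driven through oslo.vmware's API session, which issues the vSphere task and then blocks in a poll loop until vCenter reports completion. A minimal sketch of that pattern is below; the vCenter endpoint, credentials and VM managed-object id are placeholders, not values taken from this log.

```python
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

# Hypothetical connection details; replace with a reachable vCenter.
session = vmware_api.VMwareAPISession(
    'vc.example.test', 'administrator@vsphere.local', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Build a managed object reference for an existing VM (placeholder id).
vm_ref = vim_util.get_moref('vm-0000', 'VirtualMachine')

# Invoke a vSphere method that returns a Task, then block until the
# session's poller reports completion; this is the loop that emits the
# "Task: {...} progress is N%" / "completed successfully" records above.
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
task_info = session.wait_for_task(task)
print(task_info.state)  # 'success' once vCenter finishes the task
```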
[ 1261.609717] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b84cb5b2-8901-4684-b270-8bae284c4c9e tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1261.609912] env[69328]: DEBUG nova.compute.manager [-] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1261.610019] env[69328]: DEBUG nova.network.neutron [-] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1261.892808] env[69328]: DEBUG oslo_vmware.api [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274376, 'name': Destroy_Task, 'duration_secs': 0.322256} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.893252] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Destroyed the VM [ 1261.893594] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Deleting Snapshot of the VM instance {{(pid=69328) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1261.893960] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a9b5d299-8403-41de-b184-64cddbdf98c5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.900611] env[69328]: DEBUG oslo_vmware.api [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1261.900611] env[69328]: value = "task-3274377" [ 1261.900611] env[69328]: _type = "Task" [ 1261.900611] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1261.908547] env[69328]: DEBUG oslo_vmware.api [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274377, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.087412] env[69328]: DEBUG nova.compute.manager [req-9a510c23-5724-4808-83bb-f063ac58b253 req-5ec04d64-5fc4-4bc3-8a73-b8770edd43f2 service nova] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Received event network-vif-deleted-19978029-822a-48e0-b3c1-9d885b82a5f3 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1262.087661] env[69328]: INFO nova.compute.manager [req-9a510c23-5724-4808-83bb-f063ac58b253 req-5ec04d64-5fc4-4bc3-8a73-b8770edd43f2 service nova] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Neutron deleted interface 19978029-822a-48e0-b3c1-9d885b82a5f3; detaching it from the instance and deleting it from the info cache [ 1262.087839] env[69328]: DEBUG nova.network.neutron [req-9a510c23-5724-4808-83bb-f063ac58b253 req-5ec04d64-5fc4-4bc3-8a73-b8770edd43f2 service nova] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1262.179038] env[69328]: DEBUG nova.network.neutron [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Updating instance_info_cache with network_info: [{"id": "119833be-9532-4d57-aece-6b3a83d11e9f", "address": "fa:16:3e:05:b2:63", "network": {"id": "4031def8-0553-461f-9528-aa338b9d7b2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1834835647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f357b5a9494b4849a83aa934c5d4e26b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "357d2811-e990-4985-9f9e-b158d10d3699", "external-id": "nsx-vlan-transportzone-641", "segmentation_id": 641, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap119833be-95", "ovs_interfaceid": "119833be-9532-4d57-aece-6b3a83d11e9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1262.410783] env[69328]: DEBUG oslo_vmware.api [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274377, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.568049] env[69328]: DEBUG nova.network.neutron [-] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1262.589947] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c73d62cf-bb47-4fb0-b040-84b3ad8af3ac {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.600033] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86a2182c-a898-4a47-ab52-b01c082ffe4d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.625818] env[69328]: DEBUG nova.compute.manager [req-9a510c23-5724-4808-83bb-f063ac58b253 req-5ec04d64-5fc4-4bc3-8a73-b8770edd43f2 service nova] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Detach interface failed, port_id=19978029-822a-48e0-b3c1-9d885b82a5f3, reason: Instance ae46c18e-15ae-4a47-b05a-a143f10b5ab6 could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1262.682264] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Releasing lock "refresh_cache-566c3167-4cf2-4236-812f-dfbf30bbaf6f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1262.911121] env[69328]: DEBUG oslo_vmware.api [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274377, 'name': RemoveSnapshot_Task, 'duration_secs': 0.554075} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1262.911431] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Deleted Snapshot of the VM instance {{(pid=69328) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1262.911661] env[69328]: INFO nova.compute.manager [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Took 12.37 seconds to snapshot the instance on the hypervisor. 
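The lock bookkeeping interleaved through these records ("Acquiring lock ... by ...", "acquired ... waited 0.000s", '"released" ... held 0.000s') comes from oslo.concurrency's lockutils, which the compute manager uses for per-instance event locks and the "compute_resources" lock. A minimal sketch of that pattern follows; the lock names and function are illustrative only, not Nova's actual code.

```python
from oslo_concurrency import lockutils

# Decorator form: the wrapper logs DEBUG lines of the kind seen above,
# reporting how long the caller waited for and then held the lock.
# The lock name here is illustrative.
@lockutils.synchronized('compute_resources')
def update_usage():
    pass  # critical section runs while the in-process lock is held


# Context-manager form, e.g. a per-instance "<uuid>-events" style lock.
with lockutils.lock('example-instance-uuid-events'):
    pass

update_usage()
```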
[ 1263.022058] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1263.022298] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1263.070471] env[69328]: INFO nova.compute.manager [-] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Took 1.46 seconds to deallocate network for instance. [ 1263.185821] env[69328]: DEBUG nova.compute.manager [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=69328) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1263.186074] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1263.186337] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1263.458615] env[69328]: DEBUG nova.compute.manager [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Found 3 images (rotation: 2) {{(pid=69328) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1263.458615] env[69328]: DEBUG nova.compute.manager [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Rotating out 1 backups {{(pid=69328) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5025}} [ 1263.458615] env[69328]: DEBUG nova.compute.manager [None req-7d22f608-c81d-4d04-a75e-ab80122e03b5 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Deleting image 367697f9-6e5d-4706-b316-2b0bb5022954 {{(pid=69328) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5030}} [ 1263.529016] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1263.529016] env[69328]: DEBUG oslo_service.periodic_task [None 
req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1263.529016] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1263.529016] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1263.529016] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1263.529016] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1263.529016] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69328) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1263.529016] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager.update_available_resource {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1263.575701] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b84cb5b2-8901-4684-b270-8bae284c4c9e tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1263.689269] env[69328]: DEBUG nova.objects.instance [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lazy-loading 'migration_context' on Instance uuid 566c3167-4cf2-4236-812f-dfbf30bbaf6f {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1264.031714] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1264.272928] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac8504f6-3201-46ca-9010-ed948560dd23 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.281255] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa739731-1dd3-4d80-b4b2-e1a91de3b9cd {{(pid=69328) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.314643] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa8ff9a7-8d2c-46ac-b35e-6e6819282756 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.323189] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50c2170c-b010-4958-a89b-7cc9dcfa047f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.336978] env[69328]: DEBUG nova.compute.provider_tree [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1264.839974] env[69328]: DEBUG nova.scheduler.client.report [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1264.867392] env[69328]: DEBUG oslo_concurrency.lockutils [None req-34c201cd-1031-4332-a74e-4011d6183736 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "0cf68559-5f07-4006-9f7f-59027e31635d" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1264.867634] env[69328]: DEBUG oslo_concurrency.lockutils [None req-34c201cd-1031-4332-a74e-4011d6183736 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "0cf68559-5f07-4006-9f7f-59027e31635d" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1264.867792] env[69328]: DEBUG nova.compute.manager [None req-34c201cd-1031-4332-a74e-4011d6183736 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1264.868680] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28dc1dab-2cd3-433c-a4ff-19711bdf65f9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.875786] env[69328]: DEBUG nova.compute.manager [None req-34c201cd-1031-4332-a74e-4011d6183736 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Stopping instance; 
current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69328) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1264.876372] env[69328]: DEBUG nova.objects.instance [None req-34c201cd-1031-4332-a74e-4011d6183736 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lazy-loading 'flavor' on Instance uuid 0cf68559-5f07-4006-9f7f-59027e31635d {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1265.851122] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.664s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1265.856093] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b84cb5b2-8901-4684-b270-8bae284c4c9e tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.280s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1265.856320] env[69328]: DEBUG nova.objects.instance [None req-b84cb5b2-8901-4684-b270-8bae284c4c9e tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lazy-loading 'resources' on Instance uuid ae46c18e-15ae-4a47-b05a-a143f10b5ab6 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1265.882290] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-34c201cd-1031-4332-a74e-4011d6183736 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1265.882554] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9ed44075-6ad1-458a-884d-c2f9fbcecb08 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.889776] env[69328]: DEBUG oslo_vmware.api [None req-34c201cd-1031-4332-a74e-4011d6183736 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1265.889776] env[69328]: value = "task-3274378" [ 1265.889776] env[69328]: _type = "Task" [ 1265.889776] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1265.897898] env[69328]: DEBUG oslo_vmware.api [None req-34c201cd-1031-4332-a74e-4011d6183736 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274378, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.401977] env[69328]: DEBUG oslo_vmware.api [None req-34c201cd-1031-4332-a74e-4011d6183736 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274378, 'name': PowerOffVM_Task, 'duration_secs': 0.196903} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1266.404279] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-34c201cd-1031-4332-a74e-4011d6183736 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1266.404485] env[69328]: DEBUG nova.compute.manager [None req-34c201cd-1031-4332-a74e-4011d6183736 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1266.405549] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb607c73-62da-47c9-a5a6-435ed0f5fef1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.434041] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa85a0e3-ec6d-4aaa-a523-a6009a8da7e6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.441580] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dff132d3-4644-4625-8a4e-3ae16b1e3580 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.486400] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfea97f7-187d-49d4-aef2-366ebaefcc6b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.496209] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53a72c8f-4dcd-4df6-bd27-63916fb7ebe6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.514216] env[69328]: DEBUG nova.compute.provider_tree [None req-b84cb5b2-8901-4684-b270-8bae284c4c9e tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1266.917664] env[69328]: DEBUG oslo_concurrency.lockutils [None req-34c201cd-1031-4332-a74e-4011d6183736 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "0cf68559-5f07-4006-9f7f-59027e31635d" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.050s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1267.017444] env[69328]: DEBUG nova.scheduler.client.report [None req-b84cb5b2-8901-4684-b270-8bae284c4c9e tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1267.389649] env[69328]: INFO nova.compute.manager [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Swapping old allocation on dict_keys(['149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e']) held by migration 0c842e7a-fde4-43bb-9a5e-0815948fd051 for instance [ 1267.411385] env[69328]: DEBUG nova.scheduler.client.report [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Overwriting current allocation {'allocations': {'149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 174}}, 'project_id': 'f357b5a9494b4849a83aa934c5d4e26b', 'user_id': 'c1d18e6b9e284403a091afd2c3e31c1c', 'consumer_generation': 1} on consumer 566c3167-4cf2-4236-812f-dfbf30bbaf6f {{(pid=69328) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1267.482991] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "refresh_cache-566c3167-4cf2-4236-812f-dfbf30bbaf6f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1267.483223] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquired lock "refresh_cache-566c3167-4cf2-4236-812f-dfbf30bbaf6f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1267.483406] env[69328]: DEBUG nova.network.neutron [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1267.522561] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b84cb5b2-8901-4684-b270-8bae284c4c9e tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.666s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1267.524706] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 3.493s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1267.524887] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1267.525054] env[69328]: 
DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69328) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1267.526632] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee4d0cb3-5052-4f08-a908-8183e89e6ad3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.535759] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98c5a9ff-0283-4959-aa84-29fcb7ef5262 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.543628] env[69328]: INFO nova.scheduler.client.report [None req-b84cb5b2-8901-4684-b270-8bae284c4c9e tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Deleted allocations for instance ae46c18e-15ae-4a47-b05a-a143f10b5ab6 [ 1267.559757] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47c77c5c-ed62-47fa-a866-329f3d8145a3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.566596] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-266336dd-a547-4905-ac31-ab8a59d761ae {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.599257] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180494MB free_disk=116GB free_vcpus=48 pci_devices=None {{(pid=69328) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1267.599417] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1267.599630] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1268.050551] env[69328]: DEBUG nova.compute.manager [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Stashing vm_state: stopped {{(pid=69328) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1268.055156] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b84cb5b2-8901-4684-b270-8bae284c4c9e tempest-AttachVolumeShelveTestJSON-1928499663 tempest-AttachVolumeShelveTestJSON-1928499663-project-member] Lock "ae46c18e-15ae-4a47-b05a-a143f10b5ab6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.069s {{(pid=69328) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1268.191450] env[69328]: DEBUG nova.network.neutron [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Updating instance_info_cache with network_info: [{"id": "119833be-9532-4d57-aece-6b3a83d11e9f", "address": "fa:16:3e:05:b2:63", "network": {"id": "4031def8-0553-461f-9528-aa338b9d7b2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1834835647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f357b5a9494b4849a83aa934c5d4e26b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "357d2811-e990-4985-9f9e-b158d10d3699", "external-id": "nsx-vlan-transportzone-641", "segmentation_id": 641, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap119833be-95", "ovs_interfaceid": "119833be-9532-4d57-aece-6b3a83d11e9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1268.576229] env[69328]: DEBUG oslo_concurrency.lockutils [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1268.607126] env[69328]: INFO nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Updating resource usage from migration e765385e-3f45-443d-bcb5-57a488d62a8b [ 1268.621753] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance d19f6a2a-3a16-4031-8c20-143ccfd6f5f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1268.621894] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Migration e765385e-3f45-443d-bcb5-57a488d62a8b is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1268.621973] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 0cf68559-5f07-4006-9f7f-59027e31635d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1268.622085] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 566c3167-4cf2-4236-812f-dfbf30bbaf6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1268.622269] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=69328) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1268.622405] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=69328) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1268.677375] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b35d17d-9005-469c-8a56-8acf09c160bb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.685682] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d92949fe-f124-4d32-9a6a-35ebbf669a95 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.714414] env[69328]: DEBUG oslo_concurrency.lockutils [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Releasing lock "refresh_cache-566c3167-4cf2-4236-812f-dfbf30bbaf6f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1268.714833] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1268.715236] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3bf5faaf-88e1-49dc-8976-08af723b1a73 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.717199] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb67f57c-6c09-4e2e-a95a-ad7ccd26256d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.727364] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c030542e-2581-4afc-8d8e-5cdec737f465 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.731450] env[69328]: DEBUG oslo_vmware.api [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1268.731450] env[69328]: value = 
"task-3274379" [ 1268.731450] env[69328]: _type = "Task" [ 1268.731450] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1268.742089] env[69328]: DEBUG nova.compute.provider_tree [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1268.747737] env[69328]: DEBUG oslo_vmware.api [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274379, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.241290] env[69328]: DEBUG oslo_vmware.api [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274379, 'name': PowerOffVM_Task, 'duration_secs': 0.186193} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1269.241660] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1269.242200] env[69328]: DEBUG nova.virt.hardware [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1269.242449] env[69328]: DEBUG nova.virt.hardware [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1269.242611] env[69328]: DEBUG nova.virt.hardware [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1269.242790] env[69328]: DEBUG nova.virt.hardware [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1269.242937] env[69328]: DEBUG nova.virt.hardware [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 
tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1269.243099] env[69328]: DEBUG nova.virt.hardware [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1269.243306] env[69328]: DEBUG nova.virt.hardware [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1269.243465] env[69328]: DEBUG nova.virt.hardware [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1269.243627] env[69328]: DEBUG nova.virt.hardware [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1269.243789] env[69328]: DEBUG nova.virt.hardware [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1269.243959] env[69328]: DEBUG nova.virt.hardware [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1269.249416] env[69328]: DEBUG nova.scheduler.client.report [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1269.252371] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f99666eb-0c90-4401-a814-5ffcb2ffed51 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.263322] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69328) _update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1269.263494] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.664s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1269.263735] env[69328]: DEBUG oslo_concurrency.lockutils [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.688s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1269.269943] env[69328]: DEBUG oslo_vmware.api [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1269.269943] env[69328]: value = "task-3274380" [ 1269.269943] env[69328]: _type = "Task" [ 1269.269943] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.277863] env[69328]: DEBUG oslo_vmware.api [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274380, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.768920] env[69328]: INFO nova.compute.claims [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1269.780472] env[69328]: DEBUG oslo_vmware.api [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274380, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.277026] env[69328]: INFO nova.compute.resource_tracker [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Updating resource usage from migration e765385e-3f45-443d-bcb5-57a488d62a8b [ 1270.285374] env[69328]: DEBUG oslo_vmware.api [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274380, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.336450] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02f7b3d0-0820-44ae-a837-f83634e5eeb2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.344040] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41f622ae-18d2-4428-b8c1-aa3ed728a463 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.374579] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0afc2853-f3ce-4748-9fe4-b8c23d4994a9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.381510] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a449ea6-87fd-4a8a-9c2d-344d7cfcbffa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.395641] env[69328]: DEBUG nova.compute.provider_tree [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1270.781726] env[69328]: DEBUG oslo_vmware.api [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274380, 'name': ReconfigVM_Task, 'duration_secs': 1.15459} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1270.782528] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0951df6b-466a-477e-bc8f-e1b20089b83e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.799591] env[69328]: DEBUG nova.virt.hardware [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1270.799815] env[69328]: DEBUG nova.virt.hardware [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1270.799974] env[69328]: DEBUG nova.virt.hardware [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1270.800191] env[69328]: DEBUG nova.virt.hardware [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1270.800338] env[69328]: DEBUG nova.virt.hardware [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1270.800488] env[69328]: DEBUG nova.virt.hardware [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1270.800692] env[69328]: DEBUG nova.virt.hardware [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1270.800852] env[69328]: DEBUG nova.virt.hardware [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1270.801033] env[69328]: DEBUG nova.virt.hardware [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1270.801207] env[69328]: DEBUG nova.virt.hardware [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1270.801380] env[69328]: DEBUG nova.virt.hardware [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1270.802117] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-585061bc-0392-4565-ac00-74f5e6fe5e4b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.806752] env[69328]: DEBUG oslo_vmware.api [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1270.806752] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]523503ee-78bf-73b5-b1d3-fd70228ac183" [ 1270.806752] env[69328]: _type = "Task" [ 1270.806752] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1270.813847] env[69328]: DEBUG oslo_vmware.api [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]523503ee-78bf-73b5-b1d3-fd70228ac183, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.899291] env[69328]: DEBUG nova.scheduler.client.report [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1271.317246] env[69328]: DEBUG oslo_vmware.api [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]523503ee-78bf-73b5-b1d3-fd70228ac183, 'name': SearchDatastore_Task, 'duration_secs': 0.008322} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.322522] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Reconfiguring VM instance instance-00000079 to detach disk 2000 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1271.322793] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-793e226d-872c-4026-97ad-0835e1eeecd0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.340408] env[69328]: DEBUG oslo_vmware.api [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1271.340408] env[69328]: value = "task-3274382" [ 1271.340408] env[69328]: _type = "Task" [ 1271.340408] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.347814] env[69328]: DEBUG oslo_vmware.api [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274382, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.403964] env[69328]: DEBUG oslo_concurrency.lockutils [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.140s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1271.404197] env[69328]: INFO nova.compute.manager [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Migrating [ 1271.850733] env[69328]: DEBUG oslo_vmware.api [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274382, 'name': ReconfigVM_Task, 'duration_secs': 0.173918} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.850733] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Reconfigured VM instance instance-00000079 to detach disk 2000 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1271.851364] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa2458a0-2a69-4f96-8a31-a438429f67f6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.872923] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] 566c3167-4cf2-4236-812f-dfbf30bbaf6f/566c3167-4cf2-4236-812f-dfbf30bbaf6f.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1271.873194] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-96601a1c-bf1f-406b-87bf-d3a3095290a9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.891602] env[69328]: DEBUG oslo_vmware.api [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1271.891602] env[69328]: value = "task-3274383" [ 1271.891602] env[69328]: _type = "Task" [ 1271.891602] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.899014] env[69328]: DEBUG oslo_vmware.api [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274383, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.919645] env[69328]: DEBUG oslo_concurrency.lockutils [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "refresh_cache-0cf68559-5f07-4006-9f7f-59027e31635d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1271.919851] env[69328]: DEBUG oslo_concurrency.lockutils [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquired lock "refresh_cache-0cf68559-5f07-4006-9f7f-59027e31635d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1271.920087] env[69328]: DEBUG nova.network.neutron [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1272.402018] env[69328]: DEBUG oslo_vmware.api [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274383, 'name': ReconfigVM_Task, 'duration_secs': 0.26689} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1272.402315] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Reconfigured VM instance instance-00000079 to attach disk [datastore1] 566c3167-4cf2-4236-812f-dfbf30bbaf6f/566c3167-4cf2-4236-812f-dfbf30bbaf6f.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1272.403133] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76e184d5-8207-41ec-bec8-5c52fd539d1e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.420682] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9de51f9b-c97a-4167-bba3-775983b206c6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.439415] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98d993e5-444d-4e5a-9a2e-460a6b7b0eff {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.458495] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aac6dee8-d6c9-48bf-a71e-a1ccb6ab7018 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.464867] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Powering on the VM {{(pid=69328) 
power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1272.465095] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-79803684-1163-46a7-9515-884ca3e8b7ef {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.470262] env[69328]: DEBUG oslo_vmware.api [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1272.470262] env[69328]: value = "task-3274384" [ 1272.470262] env[69328]: _type = "Task" [ 1272.470262] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1272.478739] env[69328]: DEBUG oslo_vmware.api [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274384, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.631515] env[69328]: DEBUG nova.network.neutron [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Updating instance_info_cache with network_info: [{"id": "d76b0cd3-fa46-430c-b29d-7439c7857ba3", "address": "fa:16:3e:0f:95:97", "network": {"id": "238bfce5-9117-4d8e-8dd7-445d8d894599", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-231958017-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8bbb75992830459c85c818e850261c61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3d7e184-c87f-47a5-8d0d-9fa20e07e669", "external-id": "nsx-vlan-transportzone-746", "segmentation_id": 746, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd76b0cd3-fa", "ovs_interfaceid": "d76b0cd3-fa46-430c-b29d-7439c7857ba3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1272.982287] env[69328]: DEBUG oslo_vmware.api [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274384, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.134265] env[69328]: DEBUG oslo_concurrency.lockutils [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Releasing lock "refresh_cache-0cf68559-5f07-4006-9f7f-59027e31635d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1273.481650] env[69328]: DEBUG oslo_vmware.api [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274384, 'name': PowerOnVM_Task, 'duration_secs': 0.534582} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1273.481998] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1274.494416] env[69328]: INFO nova.compute.manager [None req-a8c436e2-c81b-46ce-87e9-876a20088a18 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Updating instance to original state: 'active' [ 1274.649148] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-758ab3fb-8630-4b9b-a65e-02b0ac108b78 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.668419] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Updating instance '0cf68559-5f07-4006-9f7f-59027e31635d' progress to 0 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1275.174189] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1275.174437] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8527b846-ea36-4d5a-924f-5192a5d83cf3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.182339] env[69328]: DEBUG oslo_vmware.api [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1275.182339] env[69328]: value = "task-3274385" [ 1275.182339] env[69328]: _type = "Task" [ 1275.182339] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.189752] env[69328]: DEBUG oslo_vmware.api [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274385, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.693806] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] VM already powered off {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1275.694245] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Updating instance '0cf68559-5f07-4006-9f7f-59027e31635d' progress to 17 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1276.160182] env[69328]: DEBUG oslo_concurrency.lockutils [None req-48483121-28ea-4c78-a096-c477bc191a95 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "566c3167-4cf2-4236-812f-dfbf30bbaf6f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1276.160442] env[69328]: DEBUG oslo_concurrency.lockutils [None req-48483121-28ea-4c78-a096-c477bc191a95 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "566c3167-4cf2-4236-812f-dfbf30bbaf6f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1276.160651] env[69328]: DEBUG oslo_concurrency.lockutils [None req-48483121-28ea-4c78-a096-c477bc191a95 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "566c3167-4cf2-4236-812f-dfbf30bbaf6f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1276.160834] env[69328]: DEBUG oslo_concurrency.lockutils [None req-48483121-28ea-4c78-a096-c477bc191a95 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "566c3167-4cf2-4236-812f-dfbf30bbaf6f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1276.161017] env[69328]: DEBUG oslo_concurrency.lockutils [None req-48483121-28ea-4c78-a096-c477bc191a95 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "566c3167-4cf2-4236-812f-dfbf30bbaf6f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1276.163158] env[69328]: INFO nova.compute.manager [None req-48483121-28ea-4c78-a096-c477bc191a95 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Terminating instance [ 1276.200138] env[69328]: DEBUG nova.virt.hardware [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:34:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1276.200313] env[69328]: DEBUG nova.virt.hardware [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1276.200463] env[69328]: DEBUG nova.virt.hardware [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1276.200644] env[69328]: DEBUG nova.virt.hardware [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1276.200790] env[69328]: DEBUG nova.virt.hardware [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1276.200936] env[69328]: DEBUG nova.virt.hardware [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1276.201173] env[69328]: DEBUG nova.virt.hardware [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1276.201332] env[69328]: DEBUG nova.virt.hardware [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1276.201496] env[69328]: DEBUG nova.virt.hardware [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1276.201654] env[69328]: DEBUG nova.virt.hardware [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1276.201824] env[69328]: DEBUG nova.virt.hardware [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1276.207103] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-438ed5f5-4c0f-4f2b-836e-160fd06b98f2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.224311] env[69328]: DEBUG oslo_vmware.api [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1276.224311] env[69328]: value = "task-3274386" [ 1276.224311] env[69328]: _type = "Task" [ 1276.224311] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.232087] env[69328]: DEBUG oslo_vmware.api [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274386, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.667210] env[69328]: DEBUG nova.compute.manager [None req-48483121-28ea-4c78-a096-c477bc191a95 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1276.667485] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-48483121-28ea-4c78-a096-c477bc191a95 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1276.668410] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc5fb459-84f5-4ca9-a0dd-977d6d46ce3d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.676240] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-48483121-28ea-4c78-a096-c477bc191a95 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1276.676457] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e1d41e7e-e989-4671-91a9-4863bacf973f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.682233] env[69328]: DEBUG oslo_vmware.api [None req-48483121-28ea-4c78-a096-c477bc191a95 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1276.682233] env[69328]: value = "task-3274387" [ 1276.682233] env[69328]: _type = "Task" [ 1276.682233] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.689477] env[69328]: DEBUG oslo_vmware.api [None req-48483121-28ea-4c78-a096-c477bc191a95 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274387, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.733672] env[69328]: DEBUG oslo_vmware.api [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274386, 'name': ReconfigVM_Task, 'duration_secs': 0.146025} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.733996] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Updating instance '0cf68559-5f07-4006-9f7f-59027e31635d' progress to 33 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1277.191798] env[69328]: DEBUG oslo_vmware.api [None req-48483121-28ea-4c78-a096-c477bc191a95 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274387, 'name': PowerOffVM_Task, 'duration_secs': 0.187593} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.192060] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-48483121-28ea-4c78-a096-c477bc191a95 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1277.192220] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-48483121-28ea-4c78-a096-c477bc191a95 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1277.192466] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0d3d8114-8f4b-4c32-85a9-99c8da629471 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.240414] env[69328]: DEBUG nova.virt.hardware [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1277.240686] env[69328]: DEBUG nova.virt.hardware [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1277.240780] env[69328]: DEBUG nova.virt.hardware [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1277.240958] env[69328]: DEBUG nova.virt.hardware [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1277.241115] env[69328]: DEBUG nova.virt.hardware [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1277.241273] env[69328]: DEBUG nova.virt.hardware [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1277.241478] env[69328]: DEBUG nova.virt.hardware [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1277.241636] env[69328]: DEBUG nova.virt.hardware [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1277.241803] env[69328]: DEBUG nova.virt.hardware [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1277.241964] env[69328]: DEBUG nova.virt.hardware [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1277.242150] env[69328]: DEBUG nova.virt.hardware [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1277.247282] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Reconfiguring VM instance instance-00000077 to detach disk 2000 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1277.247654] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-47c45b27-ac16-40e0-a341-1a676ecb8261 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.261201] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-48483121-28ea-4c78-a096-c477bc191a95 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1277.261392] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-48483121-28ea-4c78-a096-c477bc191a95 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1277.261569] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-48483121-28ea-4c78-a096-c477bc191a95 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Deleting the datastore file [datastore1] 
566c3167-4cf2-4236-812f-dfbf30bbaf6f {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1277.262160] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-31b5eb2b-a033-4732-a8a2-5311adfe85a9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.266883] env[69328]: DEBUG oslo_vmware.api [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1277.266883] env[69328]: value = "task-3274389" [ 1277.266883] env[69328]: _type = "Task" [ 1277.266883] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.270880] env[69328]: DEBUG oslo_vmware.api [None req-48483121-28ea-4c78-a096-c477bc191a95 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1277.270880] env[69328]: value = "task-3274390" [ 1277.270880] env[69328]: _type = "Task" [ 1277.270880] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.276510] env[69328]: DEBUG oslo_vmware.api [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274389, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.280750] env[69328]: DEBUG oslo_vmware.api [None req-48483121-28ea-4c78-a096-c477bc191a95 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274390, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.780752] env[69328]: DEBUG oslo_vmware.api [None req-48483121-28ea-4c78-a096-c477bc191a95 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274390, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13216} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.783606] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-48483121-28ea-4c78-a096-c477bc191a95 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1277.783797] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-48483121-28ea-4c78-a096-c477bc191a95 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1277.783994] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-48483121-28ea-4c78-a096-c477bc191a95 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1277.784187] env[69328]: INFO nova.compute.manager [None req-48483121-28ea-4c78-a096-c477bc191a95 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1277.784421] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-48483121-28ea-4c78-a096-c477bc191a95 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1277.784619] env[69328]: DEBUG oslo_vmware.api [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274389, 'name': ReconfigVM_Task, 'duration_secs': 0.163374} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.784804] env[69328]: DEBUG nova.compute.manager [-] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1277.784897] env[69328]: DEBUG nova.network.neutron [-] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1277.786453] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Reconfigured VM instance instance-00000077 to detach disk 2000 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1277.787178] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd3fb717-5574-4fb1-839b-b51a110e4dd0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.808215] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] 0cf68559-5f07-4006-9f7f-59027e31635d/0cf68559-5f07-4006-9f7f-59027e31635d.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1277.808733] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8faadc9b-935f-473f-a37a-3063cc77103c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.825647] env[69328]: DEBUG oslo_vmware.api [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1277.825647] env[69328]: value = "task-3274391" [ 1277.825647] env[69328]: _type = "Task" [ 1277.825647] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.832959] env[69328]: DEBUG oslo_vmware.api [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274391, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.245833] env[69328]: DEBUG nova.compute.manager [req-ead6e0ae-054e-41bf-803b-d09a77d619ab req-5da97fa2-d10e-430c-b34f-80645694c10b service nova] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Received event network-vif-deleted-119833be-9532-4d57-aece-6b3a83d11e9f {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1278.246103] env[69328]: INFO nova.compute.manager [req-ead6e0ae-054e-41bf-803b-d09a77d619ab req-5da97fa2-d10e-430c-b34f-80645694c10b service nova] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Neutron deleted interface 119833be-9532-4d57-aece-6b3a83d11e9f; detaching it from the instance and deleting it from the info cache [ 1278.246285] env[69328]: DEBUG nova.network.neutron [req-ead6e0ae-054e-41bf-803b-d09a77d619ab req-5da97fa2-d10e-430c-b34f-80645694c10b service nova] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1278.335136] env[69328]: DEBUG oslo_vmware.api [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274391, 'name': ReconfigVM_Task, 'duration_secs': 0.239606} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.335407] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Reconfigured VM instance instance-00000077 to attach disk [datastore1] 0cf68559-5f07-4006-9f7f-59027e31635d/0cf68559-5f07-4006-9f7f-59027e31635d.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1278.335673] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Updating instance '0cf68559-5f07-4006-9f7f-59027e31635d' progress to 50 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1278.729585] env[69328]: DEBUG nova.network.neutron [-] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1278.748305] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-59046a90-ac5b-49f1-815b-4938993c0e64 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.758333] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fd6c399-11b7-4d11-9639-6d6485f30f47 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.783837] env[69328]: DEBUG nova.compute.manager [req-ead6e0ae-054e-41bf-803b-d09a77d619ab req-5da97fa2-d10e-430c-b34f-80645694c10b service nova] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Detach interface failed, port_id=119833be-9532-4d57-aece-6b3a83d11e9f, reason: 
Instance 566c3167-4cf2-4236-812f-dfbf30bbaf6f could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1278.842464] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d637f587-7f13-4844-952a-b177c25b9e1b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.862395] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a162c7f6-58ae-4cce-9bef-054f73b1dac5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.879442] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Updating instance '0cf68559-5f07-4006-9f7f-59027e31635d' progress to 67 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1279.233380] env[69328]: INFO nova.compute.manager [-] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Took 1.45 seconds to deallocate network for instance. [ 1279.416213] env[69328]: DEBUG nova.network.neutron [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Port d76b0cd3-fa46-430c-b29d-7439c7857ba3 binding to destination host cpu-1 is already ACTIVE {{(pid=69328) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1279.743255] env[69328]: DEBUG oslo_concurrency.lockutils [None req-48483121-28ea-4c78-a096-c477bc191a95 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1279.743544] env[69328]: DEBUG oslo_concurrency.lockutils [None req-48483121-28ea-4c78-a096-c477bc191a95 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1279.743770] env[69328]: DEBUG nova.objects.instance [None req-48483121-28ea-4c78-a096-c477bc191a95 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lazy-loading 'resources' on Instance uuid 566c3167-4cf2-4236-812f-dfbf30bbaf6f {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1280.307483] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66a80627-9ce9-4303-ba4a-ac2ebe0d9709 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.315396] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d944673b-caa5-4f20-9d43-8e2f847f0115 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.346336] env[69328]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b89c07a7-d9ec-4e1b-a359-1ccba2888bdf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.353239] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05a2bbdd-884a-4808-985b-4f601d8cd7ff {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.366038] env[69328]: DEBUG nova.compute.provider_tree [None req-48483121-28ea-4c78-a096-c477bc191a95 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1280.436684] env[69328]: DEBUG oslo_concurrency.lockutils [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "0cf68559-5f07-4006-9f7f-59027e31635d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1280.436890] env[69328]: DEBUG oslo_concurrency.lockutils [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "0cf68559-5f07-4006-9f7f-59027e31635d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1280.437074] env[69328]: DEBUG oslo_concurrency.lockutils [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "0cf68559-5f07-4006-9f7f-59027e31635d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1280.869753] env[69328]: DEBUG nova.scheduler.client.report [None req-48483121-28ea-4c78-a096-c477bc191a95 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1281.374663] env[69328]: DEBUG oslo_concurrency.lockutils [None req-48483121-28ea-4c78-a096-c477bc191a95 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.631s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1281.392788] env[69328]: INFO nova.scheduler.client.report [None req-48483121-28ea-4c78-a096-c477bc191a95 
tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Deleted allocations for instance 566c3167-4cf2-4236-812f-dfbf30bbaf6f [ 1281.475432] env[69328]: DEBUG oslo_concurrency.lockutils [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "refresh_cache-0cf68559-5f07-4006-9f7f-59027e31635d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1281.475633] env[69328]: DEBUG oslo_concurrency.lockutils [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquired lock "refresh_cache-0cf68559-5f07-4006-9f7f-59027e31635d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1281.475811] env[69328]: DEBUG nova.network.neutron [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1281.900202] env[69328]: DEBUG oslo_concurrency.lockutils [None req-48483121-28ea-4c78-a096-c477bc191a95 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "566c3167-4cf2-4236-812f-dfbf30bbaf6f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.740s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1282.175261] env[69328]: DEBUG nova.network.neutron [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Updating instance_info_cache with network_info: [{"id": "d76b0cd3-fa46-430c-b29d-7439c7857ba3", "address": "fa:16:3e:0f:95:97", "network": {"id": "238bfce5-9117-4d8e-8dd7-445d8d894599", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-231958017-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8bbb75992830459c85c818e850261c61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3d7e184-c87f-47a5-8d0d-9fa20e07e669", "external-id": "nsx-vlan-transportzone-746", "segmentation_id": 746, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd76b0cd3-fa", "ovs_interfaceid": "d76b0cd3-fa46-430c-b29d-7439c7857ba3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1282.677753] env[69328]: DEBUG oslo_concurrency.lockutils [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 
tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Releasing lock "refresh_cache-0cf68559-5f07-4006-9f7f-59027e31635d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1283.199195] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ee64938-aec3-4874-bc0e-455d948814db {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.219396] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67f6b884-919a-44ee-aa11-3e94cc07f0d4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.226309] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Updating instance '0cf68559-5f07-4006-9f7f-59027e31635d' progress to 83 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1283.334060] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "55107d36-c16b-43f9-b436-0de8d9dfd0ca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1283.334060] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "55107d36-c16b-43f9-b436-0de8d9dfd0ca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1283.732506] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-00ba451a-c098-45cd-98ac-61f765cde7a8 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Updating instance '0cf68559-5f07-4006-9f7f-59027e31635d' progress to 100 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1283.836085] env[69328]: DEBUG nova.compute.manager [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Starting instance... 
{{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1284.357630] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1284.357901] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1284.359855] env[69328]: INFO nova.compute.claims [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1285.427745] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5a2f8a1-12dd-4bd7-8346-0f978c956ae7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.435385] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb258717-53fe-457a-b576-7c46a2a7f302 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.468444] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07a345a3-3f7e-450a-aa65-e06acbfae206 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.475122] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2004543e-a879-4da7-a278-d84e0aa49937 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.487831] env[69328]: DEBUG nova.compute.provider_tree [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1285.991053] env[69328]: DEBUG nova.scheduler.client.report [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1286.210472] env[69328]: DEBUG oslo_concurrency.lockutils [None 
req-e31744b9-9784-474f-aedf-9b284b29b48f tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "0cf68559-5f07-4006-9f7f-59027e31635d" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1286.210711] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e31744b9-9784-474f-aedf-9b284b29b48f tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "0cf68559-5f07-4006-9f7f-59027e31635d" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1286.210878] env[69328]: DEBUG nova.compute.manager [None req-e31744b9-9784-474f-aedf-9b284b29b48f tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Going to confirm migration 9 {{(pid=69328) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1286.495655] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.138s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1286.496231] env[69328]: DEBUG nova.compute.manager [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1286.747941] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e31744b9-9784-474f-aedf-9b284b29b48f tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "refresh_cache-0cf68559-5f07-4006-9f7f-59027e31635d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1286.748149] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e31744b9-9784-474f-aedf-9b284b29b48f tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquired lock "refresh_cache-0cf68559-5f07-4006-9f7f-59027e31635d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1286.748329] env[69328]: DEBUG nova.network.neutron [None req-e31744b9-9784-474f-aedf-9b284b29b48f tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1286.748563] env[69328]: DEBUG nova.objects.instance [None req-e31744b9-9784-474f-aedf-9b284b29b48f tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lazy-loading 'info_cache' on Instance uuid 0cf68559-5f07-4006-9f7f-59027e31635d {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1287.003789] env[69328]: DEBUG nova.compute.utils [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1287.005207] env[69328]: DEBUG nova.compute.manager [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1287.005346] env[69328]: DEBUG nova.network.neutron [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1287.053107] env[69328]: DEBUG nova.policy [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c1d18e6b9e284403a091afd2c3e31c1c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f357b5a9494b4849a83aa934c5d4e26b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 1287.346138] env[69328]: DEBUG nova.network.neutron [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Successfully created port: d61a3758-8f85-4e39-94d0-95fa0087b49c {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1287.508971] env[69328]: DEBUG nova.compute.manager [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Start building block device mappings for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1288.006832] env[69328]: DEBUG nova.network.neutron [None req-e31744b9-9784-474f-aedf-9b284b29b48f tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Updating instance_info_cache with network_info: [{"id": "d76b0cd3-fa46-430c-b29d-7439c7857ba3", "address": "fa:16:3e:0f:95:97", "network": {"id": "238bfce5-9117-4d8e-8dd7-445d8d894599", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-231958017-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8bbb75992830459c85c818e850261c61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3d7e184-c87f-47a5-8d0d-9fa20e07e669", "external-id": "nsx-vlan-transportzone-746", "segmentation_id": 746, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd76b0cd3-fa", "ovs_interfaceid": "d76b0cd3-fa46-430c-b29d-7439c7857ba3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1288.509892] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e31744b9-9784-474f-aedf-9b284b29b48f tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Releasing lock "refresh_cache-0cf68559-5f07-4006-9f7f-59027e31635d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1288.510280] env[69328]: DEBUG nova.objects.instance [None req-e31744b9-9784-474f-aedf-9b284b29b48f tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lazy-loading 'migration_context' on Instance uuid 0cf68559-5f07-4006-9f7f-59027e31635d {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1288.518141] env[69328]: DEBUG nova.compute.manager [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1288.544546] env[69328]: DEBUG nova.virt.hardware [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1288.544781] env[69328]: DEBUG nova.virt.hardware [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1288.544938] env[69328]: DEBUG nova.virt.hardware [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1288.545146] env[69328]: DEBUG nova.virt.hardware [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1288.545299] env[69328]: DEBUG nova.virt.hardware [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1288.545445] env[69328]: DEBUG nova.virt.hardware [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1288.545645] env[69328]: DEBUG nova.virt.hardware [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1288.545799] env[69328]: DEBUG nova.virt.hardware [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1288.545962] env[69328]: DEBUG nova.virt.hardware [None 
req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1288.546250] env[69328]: DEBUG nova.virt.hardware [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1288.546438] env[69328]: DEBUG nova.virt.hardware [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1288.547317] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-515eddc3-7df2-4556-8f8c-ff9168f1c035 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.555142] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5be1ef7f-3a51-426b-a0e2-0e489dc80f01 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.707085] env[69328]: DEBUG nova.compute.manager [req-95adc40c-3fc6-4aed-b2e5-15fd65e6047e req-67b9cbe7-4bf3-4583-87bb-b56c9540ba05 service nova] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Received event network-vif-plugged-d61a3758-8f85-4e39-94d0-95fa0087b49c {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1288.707334] env[69328]: DEBUG oslo_concurrency.lockutils [req-95adc40c-3fc6-4aed-b2e5-15fd65e6047e req-67b9cbe7-4bf3-4583-87bb-b56c9540ba05 service nova] Acquiring lock "55107d36-c16b-43f9-b436-0de8d9dfd0ca-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1288.707547] env[69328]: DEBUG oslo_concurrency.lockutils [req-95adc40c-3fc6-4aed-b2e5-15fd65e6047e req-67b9cbe7-4bf3-4583-87bb-b56c9540ba05 service nova] Lock "55107d36-c16b-43f9-b436-0de8d9dfd0ca-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1288.707705] env[69328]: DEBUG oslo_concurrency.lockutils [req-95adc40c-3fc6-4aed-b2e5-15fd65e6047e req-67b9cbe7-4bf3-4583-87bb-b56c9540ba05 service nova] Lock "55107d36-c16b-43f9-b436-0de8d9dfd0ca-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1288.707873] env[69328]: DEBUG nova.compute.manager [req-95adc40c-3fc6-4aed-b2e5-15fd65e6047e req-67b9cbe7-4bf3-4583-87bb-b56c9540ba05 service nova] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] No waiting events found dispatching network-vif-plugged-d61a3758-8f85-4e39-94d0-95fa0087b49c {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1288.708191] env[69328]: WARNING nova.compute.manager 
[req-95adc40c-3fc6-4aed-b2e5-15fd65e6047e req-67b9cbe7-4bf3-4583-87bb-b56c9540ba05 service nova] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Received unexpected event network-vif-plugged-d61a3758-8f85-4e39-94d0-95fa0087b49c for instance with vm_state building and task_state spawning. [ 1288.784556] env[69328]: DEBUG nova.network.neutron [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Successfully updated port: d61a3758-8f85-4e39-94d0-95fa0087b49c {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1289.012964] env[69328]: DEBUG nova.objects.base [None req-e31744b9-9784-474f-aedf-9b284b29b48f tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Object Instance<0cf68559-5f07-4006-9f7f-59027e31635d> lazy-loaded attributes: info_cache,migration_context {{(pid=69328) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1289.014141] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99e0f635-78b2-4d42-a12c-84f36a3fbee6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.036636] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b479b5d2-a627-4a5a-b384-8640359ddea2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.042720] env[69328]: DEBUG oslo_vmware.api [None req-e31744b9-9784-474f-aedf-9b284b29b48f tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1289.042720] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]521c83ea-519a-5c77-3318-6fa05ce7bc7d" [ 1289.042720] env[69328]: _type = "Task" [ 1289.042720] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.050440] env[69328]: DEBUG oslo_vmware.api [None req-e31744b9-9784-474f-aedf-9b284b29b48f tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]521c83ea-519a-5c77-3318-6fa05ce7bc7d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.288476] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "refresh_cache-55107d36-c16b-43f9-b436-0de8d9dfd0ca" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1289.288476] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquired lock "refresh_cache-55107d36-c16b-43f9-b436-0de8d9dfd0ca" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1289.288759] env[69328]: DEBUG nova.network.neutron [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1289.482268] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e1b8678e-84e7-4087-8794-d8a59dc5cbff tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Acquiring lock "d19f6a2a-3a16-4031-8c20-143ccfd6f5f5" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1289.482540] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e1b8678e-84e7-4087-8794-d8a59dc5cbff tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lock "d19f6a2a-3a16-4031-8c20-143ccfd6f5f5" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1289.552628] env[69328]: DEBUG oslo_vmware.api [None req-e31744b9-9784-474f-aedf-9b284b29b48f tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]521c83ea-519a-5c77-3318-6fa05ce7bc7d, 'name': SearchDatastore_Task, 'duration_secs': 0.009094} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.552970] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e31744b9-9784-474f-aedf-9b284b29b48f tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1289.553130] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e31744b9-9784-474f-aedf-9b284b29b48f tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1289.823092] env[69328]: DEBUG nova.network.neutron [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Instance cache missing network info. {{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1289.936506] env[69328]: DEBUG nova.network.neutron [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Updating instance_info_cache with network_info: [{"id": "d61a3758-8f85-4e39-94d0-95fa0087b49c", "address": "fa:16:3e:b9:82:8b", "network": {"id": "4031def8-0553-461f-9528-aa338b9d7b2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1834835647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f357b5a9494b4849a83aa934c5d4e26b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "357d2811-e990-4985-9f9e-b158d10d3699", "external-id": "nsx-vlan-transportzone-641", "segmentation_id": 641, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd61a3758-8f", "ovs_interfaceid": "d61a3758-8f85-4e39-94d0-95fa0087b49c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1289.985261] env[69328]: INFO nova.compute.manager [None req-e1b8678e-84e7-4087-8794-d8a59dc5cbff tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Detaching volume 01dd0cf8-98ef-4f40-b76d-352f74c03810 [ 1290.014478] env[69328]: INFO nova.virt.block_device [None req-e1b8678e-84e7-4087-8794-d8a59dc5cbff tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Attempting to driver detach volume 01dd0cf8-98ef-4f40-b76d-352f74c03810 from mountpoint /dev/sdb 
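Note: the "Received unexpected event network-vif-plugged-d61a3758-…" entry above, and the later "network-changed-…" events, are Nova's external instance event path: Neutron notifies Nova through the os-server-external-events API whenever a port is plugged or updated, and the compute manager matches each event against the instance named in it (here the instance is still building, so the vif-plugged event arrives before anything is waiting on it and is logged as unexpected). The sketch below shows roughly what such a notification looks like as an API call; the endpoint URL and token are placeholders, and in a real deployment it is Neutron's Nova notifier with service credentials, not a script, that sends it.

    # Hedged sketch of a network-vif-plugged notification for the instance and
    # port seen in the log above. NOVA_URL and TOKEN are placeholders.
    import requests

    NOVA_URL = "http://nova-api.example:8774/v2.1"  # placeholder endpoint
    TOKEN = "placeholder-auth-token"

    payload = {
        "events": [{
            "name": "network-vif-plugged",
            "server_uuid": "55107d36-c16b-43f9-b436-0de8d9dfd0ca",
            "tag": "d61a3758-8f85-4e39-94d0-95fa0087b49c",  # the Neutron port id
            "status": "completed",
        }]
    }
    resp = requests.post(
        f"{NOVA_URL}/os-server-external-events",
        json=payload,
        headers={"X-Auth-Token": TOKEN},
    )
    resp.raise_for_status()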
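Note: the recurring "Acquiring lock … / Lock … acquired … waited 0.000s / Lock … released … held N.NNNs" triplets come from oslo.concurrency's lockutils wrapper, which records how long a caller waited for, and then held, a named lock such as "refresh_cache-<instance uuid>" or "compute_resources". A minimal sketch of the same pattern using the public lockutils API follows; the function bodies are placeholders, not Nova code.

    from oslo_concurrency import lockutils

    def refresh_instance_cache():
        # Placeholder for the cache-refresh work done under the lock.
        pass

    # Context-manager form, mirroring the per-instance "refresh_cache-<uuid>"
    # locks in the log above.
    with lockutils.lock("refresh_cache-55107d36-c16b-43f9-b436-0de8d9dfd0ca"):
        refresh_instance_cache()

    # Decorator form, as used for coarser locks like "compute_resources".
    @lockutils.synchronized("compute_resources")
    def drop_move_claim():
        pass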
[ 1290.014714] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1b8678e-84e7-4087-8794-d8a59dc5cbff tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Volume detach. Driver type: vmdk {{(pid=69328) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1290.014896] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1b8678e-84e7-4087-8794-d8a59dc5cbff tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653985', 'volume_id': '01dd0cf8-98ef-4f40-b76d-352f74c03810', 'name': 'volume-01dd0cf8-98ef-4f40-b76d-352f74c03810', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd19f6a2a-3a16-4031-8c20-143ccfd6f5f5', 'attached_at': '', 'detached_at': '', 'volume_id': '01dd0cf8-98ef-4f40-b76d-352f74c03810', 'serial': '01dd0cf8-98ef-4f40-b76d-352f74c03810'} {{(pid=69328) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1290.016025] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79df7f28-83f1-4ef1-9abc-58f92aea8922 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.036684] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe929342-a4ba-4c51-a6b2-b4dc232a3e21 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.043672] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfe4a7ad-4eec-43d6-bcd4-0d565888a07c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.066184] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-549fbdf9-5b9e-4e73-8c9d-f6a0384e465e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.082175] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1b8678e-84e7-4087-8794-d8a59dc5cbff tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] The volume has not been displaced from its original location: [datastore1] volume-01dd0cf8-98ef-4f40-b76d-352f74c03810/volume-01dd0cf8-98ef-4f40-b76d-352f74c03810.vmdk. No consolidation needed. 
{{(pid=69328) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1290.087336] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1b8678e-84e7-4087-8794-d8a59dc5cbff tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Reconfiguring VM instance instance-00000078 to detach disk 2001 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1290.089854] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fc1e348c-0c61-4d2f-bbb3-4b4c197a5062 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.107768] env[69328]: DEBUG oslo_vmware.api [None req-e1b8678e-84e7-4087-8794-d8a59dc5cbff tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Waiting for the task: (returnval){ [ 1290.107768] env[69328]: value = "task-3274392" [ 1290.107768] env[69328]: _type = "Task" [ 1290.107768] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.115333] env[69328]: DEBUG oslo_vmware.api [None req-e1b8678e-84e7-4087-8794-d8a59dc5cbff tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274392, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.150913] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96343763-f8fe-476f-89d3-c0d4af0ddebd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.157707] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e959a674-2f4e-4cf8-8d72-be20bcceba3a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.187522] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52864f95-c531-4338-9f78-6648e406c28d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.194539] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aec289a1-47fb-4ef4-b8e0-92fb4bd0740f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.207704] env[69328]: DEBUG nova.compute.provider_tree [None req-e31744b9-9784-474f-aedf-9b284b29b48f tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1290.439681] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Releasing lock "refresh_cache-55107d36-c16b-43f9-b436-0de8d9dfd0ca" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1290.440020] env[69328]: DEBUG nova.compute.manager [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a 
tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Instance network_info: |[{"id": "d61a3758-8f85-4e39-94d0-95fa0087b49c", "address": "fa:16:3e:b9:82:8b", "network": {"id": "4031def8-0553-461f-9528-aa338b9d7b2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1834835647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f357b5a9494b4849a83aa934c5d4e26b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "357d2811-e990-4985-9f9e-b158d10d3699", "external-id": "nsx-vlan-transportzone-641", "segmentation_id": 641, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd61a3758-8f", "ovs_interfaceid": "d61a3758-8f85-4e39-94d0-95fa0087b49c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1290.440470] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b9:82:8b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '357d2811-e990-4985-9f9e-b158d10d3699', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd61a3758-8f85-4e39-94d0-95fa0087b49c', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1290.447865] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1290.448080] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1290.448302] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-01a70137-5d71-4f7f-9c1d-6de8d73a3027 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.467410] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1290.467410] env[69328]: value = "task-3274393" [ 1290.467410] env[69328]: _type = "Task" [ 1290.467410] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.474972] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274393, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.617544] env[69328]: DEBUG oslo_vmware.api [None req-e1b8678e-84e7-4087-8794-d8a59dc5cbff tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274392, 'name': ReconfigVM_Task, 'duration_secs': 0.229431} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.617887] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1b8678e-84e7-4087-8794-d8a59dc5cbff tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Reconfigured VM instance instance-00000078 to detach disk 2001 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1290.622277] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ba821a86-ef3b-4f32-a256-f7eac3ba683a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.636443] env[69328]: DEBUG oslo_vmware.api [None req-e1b8678e-84e7-4087-8794-d8a59dc5cbff tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Waiting for the task: (returnval){ [ 1290.636443] env[69328]: value = "task-3274394" [ 1290.636443] env[69328]: _type = "Task" [ 1290.636443] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.643943] env[69328]: DEBUG oslo_vmware.api [None req-e1b8678e-84e7-4087-8794-d8a59dc5cbff tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274394, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.711154] env[69328]: DEBUG nova.scheduler.client.report [None req-e31744b9-9784-474f-aedf-9b284b29b48f tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1290.735040] env[69328]: DEBUG nova.compute.manager [req-fd09a7eb-d896-4b3f-98e5-bbe02798fa46 req-cb4f5586-557f-42b5-adc1-1bc7b78e8ca9 service nova] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Received event network-changed-d61a3758-8f85-4e39-94d0-95fa0087b49c {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1290.735321] env[69328]: DEBUG nova.compute.manager [req-fd09a7eb-d896-4b3f-98e5-bbe02798fa46 req-cb4f5586-557f-42b5-adc1-1bc7b78e8ca9 service nova] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Refreshing instance network info cache due to event network-changed-d61a3758-8f85-4e39-94d0-95fa0087b49c. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1290.735616] env[69328]: DEBUG oslo_concurrency.lockutils [req-fd09a7eb-d896-4b3f-98e5-bbe02798fa46 req-cb4f5586-557f-42b5-adc1-1bc7b78e8ca9 service nova] Acquiring lock "refresh_cache-55107d36-c16b-43f9-b436-0de8d9dfd0ca" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1290.735812] env[69328]: DEBUG oslo_concurrency.lockutils [req-fd09a7eb-d896-4b3f-98e5-bbe02798fa46 req-cb4f5586-557f-42b5-adc1-1bc7b78e8ca9 service nova] Acquired lock "refresh_cache-55107d36-c16b-43f9-b436-0de8d9dfd0ca" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1290.736179] env[69328]: DEBUG nova.network.neutron [req-fd09a7eb-d896-4b3f-98e5-bbe02798fa46 req-cb4f5586-557f-42b5-adc1-1bc7b78e8ca9 service nova] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Refreshing network info cache for port d61a3758-8f85-4e39-94d0-95fa0087b49c {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1290.977973] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274393, 'name': CreateVM_Task, 'duration_secs': 0.29788} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.978147] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1290.978787] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1290.978951] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1290.979349] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1290.979606] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37a862e8-adb1-4ede-ba39-e158361f4410 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.984503] env[69328]: DEBUG oslo_vmware.api [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1290.984503] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a87927-66c0-aedf-a7f2-99336a1cad08" [ 1290.984503] env[69328]: _type = "Task" [ 1290.984503] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.991453] env[69328]: DEBUG oslo_vmware.api [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a87927-66c0-aedf-a7f2-99336a1cad08, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.146174] env[69328]: DEBUG oslo_vmware.api [None req-e1b8678e-84e7-4087-8794-d8a59dc5cbff tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274394, 'name': ReconfigVM_Task, 'duration_secs': 0.133322} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1291.146472] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1b8678e-84e7-4087-8794-d8a59dc5cbff tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653985', 'volume_id': '01dd0cf8-98ef-4f40-b76d-352f74c03810', 'name': 'volume-01dd0cf8-98ef-4f40-b76d-352f74c03810', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd19f6a2a-3a16-4031-8c20-143ccfd6f5f5', 'attached_at': '', 'detached_at': '', 'volume_id': '01dd0cf8-98ef-4f40-b76d-352f74c03810', 'serial': '01dd0cf8-98ef-4f40-b76d-352f74c03810'} {{(pid=69328) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1291.452953] env[69328]: DEBUG nova.network.neutron [req-fd09a7eb-d896-4b3f-98e5-bbe02798fa46 req-cb4f5586-557f-42b5-adc1-1bc7b78e8ca9 service nova] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Updated VIF entry in instance network info cache for port d61a3758-8f85-4e39-94d0-95fa0087b49c. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1291.453332] env[69328]: DEBUG nova.network.neutron [req-fd09a7eb-d896-4b3f-98e5-bbe02798fa46 req-cb4f5586-557f-42b5-adc1-1bc7b78e8ca9 service nova] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Updating instance_info_cache with network_info: [{"id": "d61a3758-8f85-4e39-94d0-95fa0087b49c", "address": "fa:16:3e:b9:82:8b", "network": {"id": "4031def8-0553-461f-9528-aa338b9d7b2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1834835647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f357b5a9494b4849a83aa934c5d4e26b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "357d2811-e990-4985-9f9e-b158d10d3699", "external-id": "nsx-vlan-transportzone-641", "segmentation_id": 641, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd61a3758-8f", "ovs_interfaceid": "d61a3758-8f85-4e39-94d0-95fa0087b49c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1291.494585] env[69328]: DEBUG oslo_vmware.api [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a87927-66c0-aedf-a7f2-99336a1cad08, 'name': SearchDatastore_Task, 'duration_secs': 0.009653} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1291.494880] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1291.495115] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1291.495351] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1291.495496] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1291.495671] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1291.495911] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0c04e69a-3fd1-494b-b0a3-0db039fdd98f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.504160] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1291.504341] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1291.505026] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-32d7ba7b-4bed-4149-8a92-8ddd0a303f7a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.510066] env[69328]: DEBUG oslo_vmware.api [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1291.510066] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52bdc86e-a06f-5d19-fc7a-eeddcd473ba6" [ 1291.510066] env[69328]: _type = "Task" [ 1291.510066] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1291.516917] env[69328]: DEBUG oslo_vmware.api [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52bdc86e-a06f-5d19-fc7a-eeddcd473ba6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.684881] env[69328]: DEBUG nova.objects.instance [None req-e1b8678e-84e7-4087-8794-d8a59dc5cbff tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lazy-loading 'flavor' on Instance uuid d19f6a2a-3a16-4031-8c20-143ccfd6f5f5 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1291.721936] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e31744b9-9784-474f-aedf-9b284b29b48f tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.169s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1291.722157] env[69328]: DEBUG nova.compute.manager [None req-e31744b9-9784-474f-aedf-9b284b29b48f tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. {{(pid=69328) _confirm_resize /opt/stack/nova/nova/compute/manager.py:5376}} [ 1291.956444] env[69328]: DEBUG oslo_concurrency.lockutils [req-fd09a7eb-d896-4b3f-98e5-bbe02798fa46 req-cb4f5586-557f-42b5-adc1-1bc7b78e8ca9 service nova] Releasing lock "refresh_cache-55107d36-c16b-43f9-b436-0de8d9dfd0ca" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1292.020663] env[69328]: DEBUG oslo_vmware.api [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52bdc86e-a06f-5d19-fc7a-eeddcd473ba6, 'name': SearchDatastore_Task, 'duration_secs': 0.008358} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1292.021456] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7ba0d6f-7c34-4d8a-8da7-e17498d73f8f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.026596] env[69328]: DEBUG oslo_vmware.api [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1292.026596] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5261d93f-3dbc-109b-df17-245c5ef2c577" [ 1292.026596] env[69328]: _type = "Task" [ 1292.026596] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1292.033389] env[69328]: DEBUG oslo_vmware.api [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5261d93f-3dbc-109b-df17-245c5ef2c577, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.285720] env[69328]: INFO nova.scheduler.client.report [None req-e31744b9-9784-474f-aedf-9b284b29b48f tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Deleted allocation for migration e765385e-3f45-443d-bcb5-57a488d62a8b [ 1292.536778] env[69328]: DEBUG oslo_vmware.api [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5261d93f-3dbc-109b-df17-245c5ef2c577, 'name': SearchDatastore_Task, 'duration_secs': 0.009446} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1292.537047] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1292.537321] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 55107d36-c16b-43f9-b436-0de8d9dfd0ca/55107d36-c16b-43f9-b436-0de8d9dfd0ca.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1292.537572] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-96bf54c6-9755-4982-b750-ae7ffe2b3da7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.544670] env[69328]: DEBUG oslo_vmware.api [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1292.544670] env[69328]: value = "task-3274395" [ 1292.544670] env[69328]: _type = "Task" [ 1292.544670] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1292.552290] env[69328]: DEBUG oslo_vmware.api [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274395, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.645297] env[69328]: DEBUG oslo_concurrency.lockutils [None req-34e9c518-868c-4432-940d-874b49bbb6f9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Acquiring lock "d19f6a2a-3a16-4031-8c20-143ccfd6f5f5" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1292.691922] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e1b8678e-84e7-4087-8794-d8a59dc5cbff tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lock "d19f6a2a-3a16-4031-8c20-143ccfd6f5f5" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.209s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1292.693106] env[69328]: DEBUG oslo_concurrency.lockutils [None req-34e9c518-868c-4432-940d-874b49bbb6f9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lock "d19f6a2a-3a16-4031-8c20-143ccfd6f5f5" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.048s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1292.693303] env[69328]: DEBUG nova.compute.manager [None req-34e9c518-868c-4432-940d-874b49bbb6f9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1292.694459] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c2bcc56-ccf8-4e97-98bc-4a2c45084b48 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.701886] env[69328]: DEBUG nova.compute.manager [None req-34e9c518-868c-4432-940d-874b49bbb6f9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69328) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1292.702623] env[69328]: DEBUG nova.objects.instance [None req-34e9c518-868c-4432-940d-874b49bbb6f9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lazy-loading 'flavor' on Instance uuid d19f6a2a-3a16-4031-8c20-143ccfd6f5f5 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1292.792522] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e31744b9-9784-474f-aedf-9b284b29b48f tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "0cf68559-5f07-4006-9f7f-59027e31635d" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.582s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1293.054255] env[69328]: DEBUG oslo_vmware.api [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274395, 'name': 
CopyVirtualDisk_Task, 'duration_secs': 0.410947} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1293.054523] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 55107d36-c16b-43f9-b436-0de8d9dfd0ca/55107d36-c16b-43f9-b436-0de8d9dfd0ca.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1293.054696] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1293.054926] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6c80850f-79af-4114-a4c1-edbc62d63439 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.057110] env[69328]: DEBUG nova.objects.instance [None req-7c5f4a27-4713-465f-8830-f0d1c598728e tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lazy-loading 'flavor' on Instance uuid 0cf68559-5f07-4006-9f7f-59027e31635d {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1293.063500] env[69328]: DEBUG oslo_vmware.api [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1293.063500] env[69328]: value = "task-3274396" [ 1293.063500] env[69328]: _type = "Task" [ 1293.063500] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1293.070191] env[69328]: DEBUG oslo_vmware.api [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274396, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.568213] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7c5f4a27-4713-465f-8830-f0d1c598728e tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "refresh_cache-0cf68559-5f07-4006-9f7f-59027e31635d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1293.568399] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7c5f4a27-4713-465f-8830-f0d1c598728e tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquired lock "refresh_cache-0cf68559-5f07-4006-9f7f-59027e31635d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1293.568582] env[69328]: DEBUG nova.network.neutron [None req-7c5f4a27-4713-465f-8830-f0d1c598728e tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1293.568751] env[69328]: DEBUG nova.objects.instance [None req-7c5f4a27-4713-465f-8830-f0d1c598728e tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lazy-loading 'info_cache' on Instance uuid 0cf68559-5f07-4006-9f7f-59027e31635d {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1293.575152] env[69328]: DEBUG oslo_vmware.api [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274396, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063872} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1293.575393] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1293.576168] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b290641a-1230-4dd0-b85d-c1973510bf64 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.599023] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] 55107d36-c16b-43f9-b436-0de8d9dfd0ca/55107d36-c16b-43f9-b436-0de8d9dfd0ca.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1293.599533] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-53164e58-dd5c-4360-89db-d5535c8e73df {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.619100] env[69328]: DEBUG oslo_vmware.api [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1293.619100] env[69328]: value = "task-3274397" [ 1293.619100] env[69328]: _type = "Task" [ 1293.619100] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1293.626499] env[69328]: DEBUG oslo_vmware.api [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274397, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.709415] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-34e9c518-868c-4432-940d-874b49bbb6f9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1293.709729] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d12e6af1-32db-4d58-b9a0-1f16ae961e6f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.716658] env[69328]: DEBUG oslo_vmware.api [None req-34e9c518-868c-4432-940d-874b49bbb6f9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Waiting for the task: (returnval){ [ 1293.716658] env[69328]: value = "task-3274398" [ 1293.716658] env[69328]: _type = "Task" [ 1293.716658] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1293.724399] env[69328]: DEBUG oslo_vmware.api [None req-34e9c518-868c-4432-940d-874b49bbb6f9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274398, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.073039] env[69328]: DEBUG nova.objects.base [None req-7c5f4a27-4713-465f-8830-f0d1c598728e tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Object Instance<0cf68559-5f07-4006-9f7f-59027e31635d> lazy-loaded attributes: flavor,info_cache {{(pid=69328) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1294.128747] env[69328]: DEBUG oslo_vmware.api [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274397, 'name': ReconfigVM_Task, 'duration_secs': 0.280001} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1294.129015] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Reconfigured VM instance instance-0000007c to attach disk [datastore1] 55107d36-c16b-43f9-b436-0de8d9dfd0ca/55107d36-c16b-43f9-b436-0de8d9dfd0ca.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1294.129640] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6432a4bd-456c-492e-9bb2-fd707a64b7fa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.135817] env[69328]: DEBUG oslo_vmware.api [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1294.135817] env[69328]: value = "task-3274399" [ 1294.135817] env[69328]: _type = "Task" [ 1294.135817] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1294.142837] env[69328]: DEBUG oslo_vmware.api [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274399, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.225863] env[69328]: DEBUG oslo_vmware.api [None req-34e9c518-868c-4432-940d-874b49bbb6f9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274398, 'name': PowerOffVM_Task, 'duration_secs': 0.202757} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1294.226265] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-34e9c518-868c-4432-940d-874b49bbb6f9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1294.226488] env[69328]: DEBUG nova.compute.manager [None req-34e9c518-868c-4432-940d-874b49bbb6f9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1294.227268] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7df18ed1-0302-4c15-850c-b5f4d13cf4bd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.645058] env[69328]: DEBUG oslo_vmware.api [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274399, 'name': Rename_Task, 'duration_secs': 0.186548} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1294.645333] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1294.645569] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fd620290-cc5e-4921-a51c-3a690ac5f8e4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.652027] env[69328]: DEBUG oslo_vmware.api [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1294.652027] env[69328]: value = "task-3274400" [ 1294.652027] env[69328]: _type = "Task" [ 1294.652027] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1294.659521] env[69328]: DEBUG oslo_vmware.api [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274400, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.738373] env[69328]: DEBUG oslo_concurrency.lockutils [None req-34e9c518-868c-4432-940d-874b49bbb6f9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lock "d19f6a2a-3a16-4031-8c20-143ccfd6f5f5" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.045s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1294.824724] env[69328]: DEBUG nova.network.neutron [None req-7c5f4a27-4713-465f-8830-f0d1c598728e tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Updating instance_info_cache with network_info: [{"id": "d76b0cd3-fa46-430c-b29d-7439c7857ba3", "address": "fa:16:3e:0f:95:97", "network": {"id": "238bfce5-9117-4d8e-8dd7-445d8d894599", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-231958017-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8bbb75992830459c85c818e850261c61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3d7e184-c87f-47a5-8d0d-9fa20e07e669", "external-id": "nsx-vlan-transportzone-746", "segmentation_id": 746, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd76b0cd3-fa", "ovs_interfaceid": "d76b0cd3-fa46-430c-b29d-7439c7857ba3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1295.024408] env[69328]: DEBUG nova.objects.instance [None req-0f986e73-767f-4b59-bd60-78c966218915 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lazy-loading 'flavor' on Instance uuid d19f6a2a-3a16-4031-8c20-143ccfd6f5f5 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1295.162418] env[69328]: DEBUG oslo_vmware.api [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274400, 'name': PowerOnVM_Task, 'duration_secs': 0.45478} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.162727] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1295.162878] env[69328]: INFO nova.compute.manager [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Took 6.64 seconds to spawn the instance on the hypervisor. [ 1295.163067] env[69328]: DEBUG nova.compute.manager [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1295.163826] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3b724d4-9eb0-4417-90be-61bc8780a03b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.328451] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7c5f4a27-4713-465f-8830-f0d1c598728e tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Releasing lock "refresh_cache-0cf68559-5f07-4006-9f7f-59027e31635d" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1295.529629] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0f986e73-767f-4b59-bd60-78c966218915 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Acquiring lock "refresh_cache-d19f6a2a-3a16-4031-8c20-143ccfd6f5f5" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1295.529881] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0f986e73-767f-4b59-bd60-78c966218915 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Acquired lock "refresh_cache-d19f6a2a-3a16-4031-8c20-143ccfd6f5f5" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1295.530178] env[69328]: DEBUG nova.network.neutron [None req-0f986e73-767f-4b59-bd60-78c966218915 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1295.530453] env[69328]: DEBUG nova.objects.instance [None req-0f986e73-767f-4b59-bd60-78c966218915 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lazy-loading 'info_cache' on Instance uuid d19f6a2a-3a16-4031-8c20-143ccfd6f5f5 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1295.679606] env[69328]: INFO nova.compute.manager [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Took 11.34 seconds to build instance. 
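Note: the instance_info_cache payloads logged above are lists of VIF dicts, and the VMware driver reduces each one to the handful of fields it needs to build the VM's NIC (MAC address, fixed IPs, the NSX logical-switch id used as the opaque network ref, the segmentation id and device name). The snippet below walks a trimmed copy of the d61a3758-… VIF from the cache entries above; it is illustration only, not driver code.

    # Trimmed copy of the VIF for port d61a3758-8f85-4e39-94d0-95fa0087b49c as
    # logged in the instance_info_cache updates above.
    vif = {
        "id": "d61a3758-8f85-4e39-94d0-95fa0087b49c",
        "address": "fa:16:3e:b9:82:8b",
        "devname": "tapd61a3758-8f",
        "network": {
            "id": "4031def8-0553-461f-9528-aa338b9d7b2f",
            "bridge": "br-int",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4}],
            }],
            "meta": {"mtu": 8950},
        },
        "details": {
            "nsx-logical-switch-id": "357d2811-e990-4985-9f9e-b158d10d3699",
            "segmentation_id": 641,
        },
    }

    fixed_ips = [
        ip["address"]
        for subnet in vif["network"]["subnets"]
        for ip in subnet["ips"]
        if ip["type"] == "fixed"
    ]
    # -> MAC fa:16:3e:b9:82:8b, fixed IP 192.168.128.13, and opaque network ref
    #    357d2811-e990-4985-9f9e-b158d10d3699, matching the 'Instance VIF info'
    #    entry that feeds build_virtual_machine in the log above.
    print(vif["address"], fixed_ips, vif["details"]["nsx-logical-switch-id"])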
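Note: every vSphere call above that returns a task object (CreateVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOffVM_Task, PowerOnVM_Task) is driven to completion by oslo.vmware's wait_for_task, which polls the task and emits the "progress is N%" and "completed successfully … duration_secs" lines. The loop below is only a schematic of that behaviour under stated assumptions: get_task_info() is a hypothetical accessor standing in for a PropertyCollector read of the task's info property, and the state names mirror the vSphere task model ('queued', 'running', 'success', 'error').

    import time

    def get_task_info(task_ref):
        # Hypothetical accessor; a real implementation would read the task's
        # 'info' property (state, progress, error) from vCenter.
        raise NotImplementedError

    def wait_for_task(task_ref, poll_interval=0.5):
        """Schematic of the poll loop behind the 'progress is N%' log lines."""
        start = time.monotonic()
        while True:
            info = get_task_info(task_ref)
            if info["state"] in ("queued", "running"):
                print(f"Task {task_ref} progress is {info.get('progress', 0)}%")
            elif info["state"] == "success":
                duration = time.monotonic() - start
                print(f"Task {task_ref} completed successfully "
                      f"(duration_secs={duration:.6f})")
                return info
            else:  # 'error'
                raise RuntimeError(info.get("error", "task failed"))
            time.sleep(poll_interval)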
[ 1296.034037] env[69328]: DEBUG nova.objects.base [None req-0f986e73-767f-4b59-bd60-78c966218915 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=69328) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1296.181371] env[69328]: DEBUG oslo_concurrency.lockutils [None req-2f76f826-f1d1-4cf6-9161-ec6a33225a1a tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "55107d36-c16b-43f9-b436-0de8d9dfd0ca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.848s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1296.333953] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c5f4a27-4713-465f-8830-f0d1c598728e tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1296.334349] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8d785c0a-54b1-4aea-8531-1fd0144f5d23 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.343356] env[69328]: DEBUG oslo_vmware.api [None req-7c5f4a27-4713-465f-8830-f0d1c598728e tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1296.343356] env[69328]: value = "task-3274401" [ 1296.343356] env[69328]: _type = "Task" [ 1296.343356] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.349921] env[69328]: DEBUG oslo_vmware.api [None req-7c5f4a27-4713-465f-8830-f0d1c598728e tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274401, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.357017] env[69328]: DEBUG nova.compute.manager [req-8fcafa36-109e-45fc-892d-6629c7056095 req-27362e67-f085-4aa8-a4ee-c8d9bdcb57f6 service nova] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Received event network-changed-d61a3758-8f85-4e39-94d0-95fa0087b49c {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1296.357226] env[69328]: DEBUG nova.compute.manager [req-8fcafa36-109e-45fc-892d-6629c7056095 req-27362e67-f085-4aa8-a4ee-c8d9bdcb57f6 service nova] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Refreshing instance network info cache due to event network-changed-d61a3758-8f85-4e39-94d0-95fa0087b49c. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1296.357444] env[69328]: DEBUG oslo_concurrency.lockutils [req-8fcafa36-109e-45fc-892d-6629c7056095 req-27362e67-f085-4aa8-a4ee-c8d9bdcb57f6 service nova] Acquiring lock "refresh_cache-55107d36-c16b-43f9-b436-0de8d9dfd0ca" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1296.357590] env[69328]: DEBUG oslo_concurrency.lockutils [req-8fcafa36-109e-45fc-892d-6629c7056095 req-27362e67-f085-4aa8-a4ee-c8d9bdcb57f6 service nova] Acquired lock "refresh_cache-55107d36-c16b-43f9-b436-0de8d9dfd0ca" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1296.357752] env[69328]: DEBUG nova.network.neutron [req-8fcafa36-109e-45fc-892d-6629c7056095 req-27362e67-f085-4aa8-a4ee-c8d9bdcb57f6 service nova] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Refreshing network info cache for port d61a3758-8f85-4e39-94d0-95fa0087b49c {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1296.733965] env[69328]: DEBUG nova.network.neutron [None req-0f986e73-767f-4b59-bd60-78c966218915 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Updating instance_info_cache with network_info: [{"id": "69e73394-845a-4108-8b2f-6b23a000d98c", "address": "fa:16:3e:e9:f3:15", "network": {"id": "6d64fb31-1957-4722-a4b3-46b946bfb65a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1232247602-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "278be2f8452946b9ab9c4bce8f9a7557", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5bd281ed-ae39-485f-90ee-4ee27994b5b0", "external-id": "nsx-vlan-transportzone-305", "segmentation_id": 305, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69e73394-84", "ovs_interfaceid": "69e73394-845a-4108-8b2f-6b23a000d98c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1296.851932] env[69328]: DEBUG oslo_vmware.api [None req-7c5f4a27-4713-465f-8830-f0d1c598728e tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274401, 'name': PowerOnVM_Task, 'duration_secs': 0.406811} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.852253] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c5f4a27-4713-465f-8830-f0d1c598728e tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1296.852460] env[69328]: DEBUG nova.compute.manager [None req-7c5f4a27-4713-465f-8830-f0d1c598728e tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1296.853321] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48aad795-0953-48c0-a364-987e51b2a085 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.124547] env[69328]: DEBUG nova.network.neutron [req-8fcafa36-109e-45fc-892d-6629c7056095 req-27362e67-f085-4aa8-a4ee-c8d9bdcb57f6 service nova] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Updated VIF entry in instance network info cache for port d61a3758-8f85-4e39-94d0-95fa0087b49c. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1297.124952] env[69328]: DEBUG nova.network.neutron [req-8fcafa36-109e-45fc-892d-6629c7056095 req-27362e67-f085-4aa8-a4ee-c8d9bdcb57f6 service nova] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Updating instance_info_cache with network_info: [{"id": "d61a3758-8f85-4e39-94d0-95fa0087b49c", "address": "fa:16:3e:b9:82:8b", "network": {"id": "4031def8-0553-461f-9528-aa338b9d7b2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1834835647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f357b5a9494b4849a83aa934c5d4e26b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "357d2811-e990-4985-9f9e-b158d10d3699", "external-id": "nsx-vlan-transportzone-641", "segmentation_id": 641, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd61a3758-8f", "ovs_interfaceid": "d61a3758-8f85-4e39-94d0-95fa0087b49c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1297.237354] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0f986e73-767f-4b59-bd60-78c966218915 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Releasing lock "refresh_cache-d19f6a2a-3a16-4031-8c20-143ccfd6f5f5" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1297.628238] env[69328]: DEBUG oslo_concurrency.lockutils [req-8fcafa36-109e-45fc-892d-6629c7056095 
req-27362e67-f085-4aa8-a4ee-c8d9bdcb57f6 service nova] Releasing lock "refresh_cache-55107d36-c16b-43f9-b436-0de8d9dfd0ca" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1297.664528] env[69328]: DEBUG oslo_concurrency.lockutils [None req-620cc5a3-847f-4c6e-9208-0292bf47eb31 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "0cf68559-5f07-4006-9f7f-59027e31635d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1297.664769] env[69328]: DEBUG oslo_concurrency.lockutils [None req-620cc5a3-847f-4c6e-9208-0292bf47eb31 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "0cf68559-5f07-4006-9f7f-59027e31635d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1297.664970] env[69328]: DEBUG oslo_concurrency.lockutils [None req-620cc5a3-847f-4c6e-9208-0292bf47eb31 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "0cf68559-5f07-4006-9f7f-59027e31635d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1297.665173] env[69328]: DEBUG oslo_concurrency.lockutils [None req-620cc5a3-847f-4c6e-9208-0292bf47eb31 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "0cf68559-5f07-4006-9f7f-59027e31635d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1297.665346] env[69328]: DEBUG oslo_concurrency.lockutils [None req-620cc5a3-847f-4c6e-9208-0292bf47eb31 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "0cf68559-5f07-4006-9f7f-59027e31635d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1297.667926] env[69328]: INFO nova.compute.manager [None req-620cc5a3-847f-4c6e-9208-0292bf47eb31 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Terminating instance [ 1298.172062] env[69328]: DEBUG nova.compute.manager [None req-620cc5a3-847f-4c6e-9208-0292bf47eb31 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Start destroying the instance on the hypervisor. 
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1298.172506] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-620cc5a3-847f-4c6e-9208-0292bf47eb31 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1298.173214] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5da1d057-0881-474d-9f5e-7bc5dabc8eca {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.181395] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-620cc5a3-847f-4c6e-9208-0292bf47eb31 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1298.181620] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9b8dff63-1983-4628-842b-f27901b14583 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.187087] env[69328]: DEBUG oslo_vmware.api [None req-620cc5a3-847f-4c6e-9208-0292bf47eb31 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1298.187087] env[69328]: value = "task-3274402" [ 1298.187087] env[69328]: _type = "Task" [ 1298.187087] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.194364] env[69328]: DEBUG oslo_vmware.api [None req-620cc5a3-847f-4c6e-9208-0292bf47eb31 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274402, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.243249] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f986e73-767f-4b59-bd60-78c966218915 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1298.243641] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5e5d81a1-b0dc-40bd-bd82-bfd022416ea1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.250674] env[69328]: DEBUG oslo_vmware.api [None req-0f986e73-767f-4b59-bd60-78c966218915 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Waiting for the task: (returnval){ [ 1298.250674] env[69328]: value = "task-3274403" [ 1298.250674] env[69328]: _type = "Task" [ 1298.250674] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.258354] env[69328]: DEBUG oslo_vmware.api [None req-0f986e73-767f-4b59-bd60-78c966218915 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274403, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.696915] env[69328]: DEBUG oslo_vmware.api [None req-620cc5a3-847f-4c6e-9208-0292bf47eb31 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274402, 'name': PowerOffVM_Task, 'duration_secs': 0.245387} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.697291] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-620cc5a3-847f-4c6e-9208-0292bf47eb31 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1298.697476] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-620cc5a3-847f-4c6e-9208-0292bf47eb31 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1298.697722] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5192156a-c410-4ab5-971e-f3b4ee710c2e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.760207] env[69328]: DEBUG oslo_vmware.api [None req-0f986e73-767f-4b59-bd60-78c966218915 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274403, 'name': PowerOnVM_Task, 'duration_secs': 0.388586} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.760472] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f986e73-767f-4b59-bd60-78c966218915 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1298.760667] env[69328]: DEBUG nova.compute.manager [None req-0f986e73-767f-4b59-bd60-78c966218915 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1298.761436] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdb77dff-6d90-478d-b616-f1b6ea66231a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.778888] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-620cc5a3-847f-4c6e-9208-0292bf47eb31 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1298.779103] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-620cc5a3-847f-4c6e-9208-0292bf47eb31 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1298.779294] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-620cc5a3-847f-4c6e-9208-0292bf47eb31 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Deleting the datastore file [datastore1] 0cf68559-5f07-4006-9f7f-59027e31635d {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1298.779538] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e4cf001a-3754-4742-b09f-67fc5c1bd6c6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.785443] env[69328]: DEBUG oslo_vmware.api [None req-620cc5a3-847f-4c6e-9208-0292bf47eb31 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1298.785443] env[69328]: value = "task-3274405" [ 1298.785443] env[69328]: _type = "Task" [ 1298.785443] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.792849] env[69328]: DEBUG oslo_vmware.api [None req-620cc5a3-847f-4c6e-9208-0292bf47eb31 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274405, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.294990] env[69328]: DEBUG oslo_vmware.api [None req-620cc5a3-847f-4c6e-9208-0292bf47eb31 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274405, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.796677] env[69328]: DEBUG oslo_vmware.api [None req-620cc5a3-847f-4c6e-9208-0292bf47eb31 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274405, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.750363} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.796944] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-620cc5a3-847f-4c6e-9208-0292bf47eb31 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1299.797126] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-620cc5a3-847f-4c6e-9208-0292bf47eb31 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1299.797300] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-620cc5a3-847f-4c6e-9208-0292bf47eb31 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1299.797468] env[69328]: INFO nova.compute.manager [None req-620cc5a3-847f-4c6e-9208-0292bf47eb31 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Took 1.63 seconds to destroy the instance on the hypervisor. [ 1299.797724] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-620cc5a3-847f-4c6e-9208-0292bf47eb31 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1299.797980] env[69328]: DEBUG nova.compute.manager [-] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1299.798094] env[69328]: DEBUG nova.network.neutron [-] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1300.179257] env[69328]: DEBUG nova.compute.manager [req-a9c5edbf-f42f-460d-a2ad-86cba64fb135 req-532dfa82-c966-4312-814c-c0d1a2f83512 service nova] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Received event network-vif-deleted-d76b0cd3-fa46-430c-b29d-7439c7857ba3 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1300.179464] env[69328]: INFO nova.compute.manager [req-a9c5edbf-f42f-460d-a2ad-86cba64fb135 req-532dfa82-c966-4312-814c-c0d1a2f83512 service nova] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Neutron deleted interface d76b0cd3-fa46-430c-b29d-7439c7857ba3; detaching it from the instance and deleting it from the info cache [ 1300.179619] env[69328]: DEBUG nova.network.neutron [req-a9c5edbf-f42f-460d-a2ad-86cba64fb135 req-532dfa82-c966-4312-814c-c0d1a2f83512 service nova] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1300.663726] env[69328]: DEBUG nova.network.neutron [-] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1300.682375] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a8859e7a-57db-4764-bfc2-e200d29a2828 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.692729] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-112cc911-a976-403b-af44-62473ed4745a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.719125] env[69328]: DEBUG nova.compute.manager [req-a9c5edbf-f42f-460d-a2ad-86cba64fb135 req-532dfa82-c966-4312-814c-c0d1a2f83512 service nova] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Detach interface failed, port_id=d76b0cd3-fa46-430c-b29d-7439c7857ba3, reason: Instance 0cf68559-5f07-4006-9f7f-59027e31635d could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1301.166924] env[69328]: INFO nova.compute.manager [-] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Took 1.37 seconds to deallocate network for instance. 
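The teardown of instance 0cf68559-5f07-4006-9f7f-59027e31635d recorded above applies the same invoke-and-poll pattern three times on the vSphere side: PowerOffVM_Task, UnregisterVM (which completes synchronously, so no task is polled), and FileManager.DeleteDatastoreFile_Task for the instance directory on datastore1, before Neutron deallocation. A rough sketch of that sequence, reusing a session object like the one in the earlier sketch; vm_ref, ds_path and dc_ref are placeholders, and the real logic lives in nova.virt.vmwareapi.vmops / ds_util as logged above.

    def destroy_backing(session, vm_ref, ds_path, dc_ref):
        # Power off the VM and wait for PowerOffVM_Task to finish.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)
        # Remove the VM from the vCenter inventory (no task returned).
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)
        # Delete the instance directory from the datastore, e.g.
        # "[datastore1] 0cf68559-5f07-4006-9f7f-59027e31635d".
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=ds_path,
                                  datacenter=dc_ref)
        session.wait_for_task(task)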
[ 1301.673454] env[69328]: DEBUG oslo_concurrency.lockutils [None req-620cc5a3-847f-4c6e-9208-0292bf47eb31 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1301.673829] env[69328]: DEBUG oslo_concurrency.lockutils [None req-620cc5a3-847f-4c6e-9208-0292bf47eb31 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1301.673936] env[69328]: DEBUG oslo_concurrency.lockutils [None req-620cc5a3-847f-4c6e-9208-0292bf47eb31 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1301.699502] env[69328]: INFO nova.scheduler.client.report [None req-620cc5a3-847f-4c6e-9208-0292bf47eb31 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Deleted allocations for instance 0cf68559-5f07-4006-9f7f-59027e31635d [ 1302.207256] env[69328]: DEBUG oslo_concurrency.lockutils [None req-620cc5a3-847f-4c6e-9208-0292bf47eb31 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "0cf68559-5f07-4006-9f7f-59027e31635d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.542s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1303.582664] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "1a0e084a-f7b2-4f2e-b508-33caeed2ffeb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1303.582991] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "1a0e084a-f7b2-4f2e-b508-33caeed2ffeb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1304.085848] env[69328]: DEBUG nova.compute.manager [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Starting instance... 
{{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1304.606441] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1304.606733] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1304.608229] env[69328]: INFO nova.compute.claims [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1305.663032] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22d611cd-ed03-461c-81d4-d17e377acda9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.671252] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fff7c548-1ef6-40a9-90d7-0840bbec9cbd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.700686] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b196b23-e85a-4ede-94da-3c06cca852a8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.707773] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df7e741d-acf6-4b67-a424-02e836d157fa {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.720611] env[69328]: DEBUG nova.compute.provider_tree [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1306.224192] env[69328]: DEBUG nova.scheduler.client.report [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1306.729978] env[69328]: DEBUG oslo_concurrency.lockutils 
[None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.123s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1306.730554] env[69328]: DEBUG nova.compute.manager [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1307.235189] env[69328]: DEBUG nova.compute.utils [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1307.236700] env[69328]: DEBUG nova.compute.manager [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1307.236864] env[69328]: DEBUG nova.network.neutron [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1307.285974] env[69328]: DEBUG nova.policy [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a07713f537e84711bc559a085d1e05f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8bbb75992830459c85c818e850261c61', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 1307.530109] env[69328]: DEBUG nova.network.neutron [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Successfully created port: 096b3ef6-28d7-4463-a0b2-884dd086287d {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1307.740138] env[69328]: DEBUG nova.compute.manager [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Start building block device mappings for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1308.749815] env[69328]: DEBUG nova.compute.manager [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1308.775651] env[69328]: DEBUG nova.virt.hardware [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1308.775896] env[69328]: DEBUG nova.virt.hardware [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1308.776066] env[69328]: DEBUG nova.virt.hardware [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1308.776250] env[69328]: DEBUG nova.virt.hardware [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1308.776423] env[69328]: DEBUG nova.virt.hardware [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1308.776576] env[69328]: DEBUG nova.virt.hardware [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1308.776781] env[69328]: DEBUG nova.virt.hardware [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 1308.776935] env[69328]: DEBUG nova.virt.hardware [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1308.777112] env[69328]: DEBUG nova.virt.hardware [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1308.777279] env[69328]: DEBUG nova.virt.hardware [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1308.777479] env[69328]: DEBUG nova.virt.hardware [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1308.778354] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdefd407-4e76-4f4b-a40e-93e041dce098 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.786169] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d96ea5e-e934-46b7-b648-afee3b9522d1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.863469] env[69328]: DEBUG nova.compute.manager [req-2c66131b-a132-42a1-8ac0-8b5bf7e9f254 req-d0583042-c56a-4849-a166-adf6e09379a7 service nova] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Received event network-vif-plugged-096b3ef6-28d7-4463-a0b2-884dd086287d {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1308.863683] env[69328]: DEBUG oslo_concurrency.lockutils [req-2c66131b-a132-42a1-8ac0-8b5bf7e9f254 req-d0583042-c56a-4849-a166-adf6e09379a7 service nova] Acquiring lock "1a0e084a-f7b2-4f2e-b508-33caeed2ffeb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1308.864022] env[69328]: DEBUG oslo_concurrency.lockutils [req-2c66131b-a132-42a1-8ac0-8b5bf7e9f254 req-d0583042-c56a-4849-a166-adf6e09379a7 service nova] Lock "1a0e084a-f7b2-4f2e-b508-33caeed2ffeb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1308.864096] env[69328]: DEBUG oslo_concurrency.lockutils [req-2c66131b-a132-42a1-8ac0-8b5bf7e9f254 req-d0583042-c56a-4849-a166-adf6e09379a7 service nova] Lock "1a0e084a-f7b2-4f2e-b508-33caeed2ffeb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1308.864221] env[69328]: 
DEBUG nova.compute.manager [req-2c66131b-a132-42a1-8ac0-8b5bf7e9f254 req-d0583042-c56a-4849-a166-adf6e09379a7 service nova] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] No waiting events found dispatching network-vif-plugged-096b3ef6-28d7-4463-a0b2-884dd086287d {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1308.864416] env[69328]: WARNING nova.compute.manager [req-2c66131b-a132-42a1-8ac0-8b5bf7e9f254 req-d0583042-c56a-4849-a166-adf6e09379a7 service nova] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Received unexpected event network-vif-plugged-096b3ef6-28d7-4463-a0b2-884dd086287d for instance with vm_state building and task_state spawning. [ 1308.940226] env[69328]: DEBUG nova.network.neutron [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Successfully updated port: 096b3ef6-28d7-4463-a0b2-884dd086287d {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1309.442471] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "refresh_cache-1a0e084a-f7b2-4f2e-b508-33caeed2ffeb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1309.442634] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquired lock "refresh_cache-1a0e084a-f7b2-4f2e-b508-33caeed2ffeb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1309.442786] env[69328]: DEBUG nova.network.neutron [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1309.975104] env[69328]: DEBUG nova.network.neutron [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1310.089162] env[69328]: DEBUG nova.network.neutron [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Updating instance_info_cache with network_info: [{"id": "096b3ef6-28d7-4463-a0b2-884dd086287d", "address": "fa:16:3e:6a:a4:57", "network": {"id": "238bfce5-9117-4d8e-8dd7-445d8d894599", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-231958017-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8bbb75992830459c85c818e850261c61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3d7e184-c87f-47a5-8d0d-9fa20e07e669", "external-id": "nsx-vlan-transportzone-746", "segmentation_id": 746, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap096b3ef6-28", "ovs_interfaceid": "096b3ef6-28d7-4463-a0b2-884dd086287d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1310.592151] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Releasing lock "refresh_cache-1a0e084a-f7b2-4f2e-b508-33caeed2ffeb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1310.592497] env[69328]: DEBUG nova.compute.manager [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Instance network_info: |[{"id": "096b3ef6-28d7-4463-a0b2-884dd086287d", "address": "fa:16:3e:6a:a4:57", "network": {"id": "238bfce5-9117-4d8e-8dd7-445d8d894599", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-231958017-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8bbb75992830459c85c818e850261c61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3d7e184-c87f-47a5-8d0d-9fa20e07e669", "external-id": "nsx-vlan-transportzone-746", "segmentation_id": 746, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap096b3ef6-28", "ovs_interfaceid": "096b3ef6-28d7-4463-a0b2-884dd086287d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1310.592924] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6a:a4:57', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f3d7e184-c87f-47a5-8d0d-9fa20e07e669', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '096b3ef6-28d7-4463-a0b2-884dd086287d', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1310.600398] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1310.600607] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1310.600828] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-64ad8487-0c1e-482b-b033-8660d31eaa40 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.621691] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1310.621691] env[69328]: value = "task-3274406" [ 1310.621691] env[69328]: _type = "Task" [ 1310.621691] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.629187] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274406, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.892796] env[69328]: DEBUG nova.compute.manager [req-97e0dfba-5b01-4fb4-8c9b-ae3b4183d007 req-e625eb5f-f566-4417-87fa-fb4a50bcbec2 service nova] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Received event network-changed-096b3ef6-28d7-4463-a0b2-884dd086287d {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1310.892997] env[69328]: DEBUG nova.compute.manager [req-97e0dfba-5b01-4fb4-8c9b-ae3b4183d007 req-e625eb5f-f566-4417-87fa-fb4a50bcbec2 service nova] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Refreshing instance network info cache due to event network-changed-096b3ef6-28d7-4463-a0b2-884dd086287d. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1310.893233] env[69328]: DEBUG oslo_concurrency.lockutils [req-97e0dfba-5b01-4fb4-8c9b-ae3b4183d007 req-e625eb5f-f566-4417-87fa-fb4a50bcbec2 service nova] Acquiring lock "refresh_cache-1a0e084a-f7b2-4f2e-b508-33caeed2ffeb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1310.893380] env[69328]: DEBUG oslo_concurrency.lockutils [req-97e0dfba-5b01-4fb4-8c9b-ae3b4183d007 req-e625eb5f-f566-4417-87fa-fb4a50bcbec2 service nova] Acquired lock "refresh_cache-1a0e084a-f7b2-4f2e-b508-33caeed2ffeb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1310.893544] env[69328]: DEBUG nova.network.neutron [req-97e0dfba-5b01-4fb4-8c9b-ae3b4183d007 req-e625eb5f-f566-4417-87fa-fb4a50bcbec2 service nova] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Refreshing network info cache for port 096b3ef6-28d7-4463-a0b2-884dd086287d {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1311.131938] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274406, 'name': CreateVM_Task, 'duration_secs': 0.319175} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.132265] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1311.132751] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1311.132910] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1311.133249] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1311.133498] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2881f9b6-aaa7-4ea4-80d7-8a5033794721 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.138970] env[69328]: DEBUG oslo_vmware.api [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1311.138970] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52553ddd-1dd2-98d7-14c7-508dc606540b" [ 1311.138970] env[69328]: _type = "Task" [ 1311.138970] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.146564] env[69328]: DEBUG oslo_vmware.api [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52553ddd-1dd2-98d7-14c7-508dc606540b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.562231] env[69328]: DEBUG nova.network.neutron [req-97e0dfba-5b01-4fb4-8c9b-ae3b4183d007 req-e625eb5f-f566-4417-87fa-fb4a50bcbec2 service nova] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Updated VIF entry in instance network info cache for port 096b3ef6-28d7-4463-a0b2-884dd086287d. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1311.562585] env[69328]: DEBUG nova.network.neutron [req-97e0dfba-5b01-4fb4-8c9b-ae3b4183d007 req-e625eb5f-f566-4417-87fa-fb4a50bcbec2 service nova] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Updating instance_info_cache with network_info: [{"id": "096b3ef6-28d7-4463-a0b2-884dd086287d", "address": "fa:16:3e:6a:a4:57", "network": {"id": "238bfce5-9117-4d8e-8dd7-445d8d894599", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-231958017-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8bbb75992830459c85c818e850261c61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3d7e184-c87f-47a5-8d0d-9fa20e07e669", "external-id": "nsx-vlan-transportzone-746", "segmentation_id": 746, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap096b3ef6-28", "ovs_interfaceid": "096b3ef6-28d7-4463-a0b2-884dd086287d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1311.648912] env[69328]: DEBUG oslo_vmware.api [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52553ddd-1dd2-98d7-14c7-508dc606540b, 'name': SearchDatastore_Task, 'duration_secs': 0.009284} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.649203] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1311.649432] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1311.649660] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1311.649803] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1311.649975] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1311.650226] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6bb756a1-132a-496f-89a8-0cb0d2aaafe9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.658428] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1311.658597] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1311.659280] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-592d10a5-be73-4a65-8e4b-9ef479cd17cb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.663750] env[69328]: DEBUG oslo_vmware.api [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1311.663750] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52962ff6-6bdc-1602-bd20-e72d7d13d7a4" [ 1311.663750] env[69328]: _type = "Task" [ 1311.663750] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.670575] env[69328]: DEBUG oslo_vmware.api [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52962ff6-6bdc-1602-bd20-e72d7d13d7a4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.065810] env[69328]: DEBUG oslo_concurrency.lockutils [req-97e0dfba-5b01-4fb4-8c9b-ae3b4183d007 req-e625eb5f-f566-4417-87fa-fb4a50bcbec2 service nova] Releasing lock "refresh_cache-1a0e084a-f7b2-4f2e-b508-33caeed2ffeb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1312.174367] env[69328]: DEBUG oslo_vmware.api [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52962ff6-6bdc-1602-bd20-e72d7d13d7a4, 'name': SearchDatastore_Task, 'duration_secs': 0.007678} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.175140] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b766d09-0123-448d-a5a8-e9aa21558025 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.182213] env[69328]: DEBUG oslo_vmware.api [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1312.182213] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]521057c9-d699-5ed4-224d-8231f826a783" [ 1312.182213] env[69328]: _type = "Task" [ 1312.182213] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.189656] env[69328]: DEBUG oslo_vmware.api [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]521057c9-d699-5ed4-224d-8231f826a783, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.692354] env[69328]: DEBUG oslo_vmware.api [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]521057c9-d699-5ed4-224d-8231f826a783, 'name': SearchDatastore_Task, 'duration_secs': 0.010907} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.692608] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1312.692874] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb/1a0e084a-f7b2-4f2e-b508-33caeed2ffeb.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1312.693141] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-73e407da-2e13-426c-878e-2b01517a4e95 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.698980] env[69328]: DEBUG oslo_vmware.api [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1312.698980] env[69328]: value = "task-3274407" [ 1312.698980] env[69328]: _type = "Task" [ 1312.698980] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.706233] env[69328]: DEBUG oslo_vmware.api [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274407, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.208395] env[69328]: DEBUG oslo_vmware.api [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274407, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.446095} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.208773] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb/1a0e084a-f7b2-4f2e-b508-33caeed2ffeb.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1313.208850] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1313.209142] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-214a5b90-cc79-4961-8767-85e888b80989 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.215356] env[69328]: DEBUG oslo_vmware.api [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1313.215356] env[69328]: value = "task-3274408" [ 1313.215356] env[69328]: _type = "Task" [ 1313.215356] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.223489] env[69328]: DEBUG oslo_vmware.api [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274408, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.724850] env[69328]: DEBUG oslo_vmware.api [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274408, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058704} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.725123] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1313.726301] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e60b9fa7-6243-494b-af40-ca9b621e84d7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.749818] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Reconfiguring VM instance instance-0000007d to attach disk [datastore1] 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb/1a0e084a-f7b2-4f2e-b508-33caeed2ffeb.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1313.750075] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-45255542-9535-467d-a815-07db531a247c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.769816] env[69328]: DEBUG oslo_vmware.api [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1313.769816] env[69328]: value = "task-3274409" [ 1313.769816] env[69328]: _type = "Task" [ 1313.769816] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.777362] env[69328]: DEBUG oslo_vmware.api [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274409, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.280054] env[69328]: DEBUG oslo_vmware.api [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274409, 'name': ReconfigVM_Task, 'duration_secs': 0.294233} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.280054] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Reconfigured VM instance instance-0000007d to attach disk [datastore1] 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb/1a0e084a-f7b2-4f2e-b508-33caeed2ffeb.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1314.280054] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ee4c4c57-6b47-4f53-a30d-6cf5641cc64d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.286321] env[69328]: DEBUG oslo_vmware.api [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1314.286321] env[69328]: value = "task-3274410" [ 1314.286321] env[69328]: _type = "Task" [ 1314.286321] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.296417] env[69328]: DEBUG oslo_vmware.api [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274410, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.795997] env[69328]: DEBUG oslo_vmware.api [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274410, 'name': Rename_Task, 'duration_secs': 0.160998} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.796287] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1314.796563] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d401709d-20b0-402e-9036-4b954d6c5998 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.802909] env[69328]: DEBUG oslo_vmware.api [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1314.802909] env[69328]: value = "task-3274411" [ 1314.802909] env[69328]: _type = "Task" [ 1314.802909] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.811597] env[69328]: DEBUG oslo_vmware.api [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274411, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.313689] env[69328]: DEBUG oslo_vmware.api [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274411, 'name': PowerOnVM_Task, 'duration_secs': 0.444691} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.313689] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1315.314101] env[69328]: INFO nova.compute.manager [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Took 6.56 seconds to spawn the instance on the hypervisor. [ 1315.314101] env[69328]: DEBUG nova.compute.manager [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1315.314757] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1744b8bf-ed4e-4a04-a328-0d3b5916c36b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.831652] env[69328]: INFO nova.compute.manager [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Took 11.24 seconds to build instance. [ 1316.334252] env[69328]: DEBUG oslo_concurrency.lockutils [None req-e00a405a-108b-46b4-bbc2-6c05f3f2c921 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "1a0e084a-f7b2-4f2e-b508-33caeed2ffeb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.751s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1316.817482] env[69328]: DEBUG nova.compute.manager [req-782cf739-506c-4bd3-a448-2c64d4e852f2 req-7907388c-0c30-4a70-b73c-16ca31491be1 service nova] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Received event network-changed-096b3ef6-28d7-4463-a0b2-884dd086287d {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1316.817679] env[69328]: DEBUG nova.compute.manager [req-782cf739-506c-4bd3-a448-2c64d4e852f2 req-7907388c-0c30-4a70-b73c-16ca31491be1 service nova] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Refreshing instance network info cache due to event network-changed-096b3ef6-28d7-4463-a0b2-884dd086287d. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1316.817889] env[69328]: DEBUG oslo_concurrency.lockutils [req-782cf739-506c-4bd3-a448-2c64d4e852f2 req-7907388c-0c30-4a70-b73c-16ca31491be1 service nova] Acquiring lock "refresh_cache-1a0e084a-f7b2-4f2e-b508-33caeed2ffeb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1316.818158] env[69328]: DEBUG oslo_concurrency.lockutils [req-782cf739-506c-4bd3-a448-2c64d4e852f2 req-7907388c-0c30-4a70-b73c-16ca31491be1 service nova] Acquired lock "refresh_cache-1a0e084a-f7b2-4f2e-b508-33caeed2ffeb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1316.818366] env[69328]: DEBUG nova.network.neutron [req-782cf739-506c-4bd3-a448-2c64d4e852f2 req-7907388c-0c30-4a70-b73c-16ca31491be1 service nova] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Refreshing network info cache for port 096b3ef6-28d7-4463-a0b2-884dd086287d {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1317.512569] env[69328]: DEBUG nova.network.neutron [req-782cf739-506c-4bd3-a448-2c64d4e852f2 req-7907388c-0c30-4a70-b73c-16ca31491be1 service nova] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Updated VIF entry in instance network info cache for port 096b3ef6-28d7-4463-a0b2-884dd086287d. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1317.513434] env[69328]: DEBUG nova.network.neutron [req-782cf739-506c-4bd3-a448-2c64d4e852f2 req-7907388c-0c30-4a70-b73c-16ca31491be1 service nova] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Updating instance_info_cache with network_info: [{"id": "096b3ef6-28d7-4463-a0b2-884dd086287d", "address": "fa:16:3e:6a:a4:57", "network": {"id": "238bfce5-9117-4d8e-8dd7-445d8d894599", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-231958017-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8bbb75992830459c85c818e850261c61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3d7e184-c87f-47a5-8d0d-9fa20e07e669", "external-id": "nsx-vlan-transportzone-746", "segmentation_id": 746, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap096b3ef6-28", "ovs_interfaceid": "096b3ef6-28d7-4463-a0b2-884dd086287d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1318.015775] env[69328]: DEBUG oslo_concurrency.lockutils [req-782cf739-506c-4bd3-a448-2c64d4e852f2 req-7907388c-0c30-4a70-b73c-16ca31491be1 service nova] Releasing lock "refresh_cache-1a0e084a-f7b2-4f2e-b508-33caeed2ffeb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1329.265553] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic 
task ComputeManager._check_instance_build_time {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1329.266063] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1329.266063] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1329.266063] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1329.266236] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1329.266350] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1329.266516] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1329.266673] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69328) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1329.266897] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager.update_available_resource {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1329.770664] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1329.770980] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1329.771184] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1329.771344] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69328) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1329.773608] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5a25796-d0e1-4c56-a208-dde8c6d8afd2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.781872] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3403dd37-b63a-440a-a68d-c4120d84b5af {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.795742] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d4d0e1d-447f-49f1-966b-0e39c2fe5882 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.801744] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaa656c6-3bbf-4feb-a947-186f6212df64 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.829512] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180702MB free_disk=116GB free_vcpus=48 pci_devices=None {{(pid=69328) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1329.829647] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 
1329.829849] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1330.858150] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance d19f6a2a-3a16-4031-8c20-143ccfd6f5f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1330.858411] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 55107d36-c16b-43f9-b436-0de8d9dfd0ca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1330.858457] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1330.858603] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=69328) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1330.858744] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=69328) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1330.905641] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edcd440f-c1d7-4664-b9d6-c59194ad3f2a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.913576] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82a4c888-8ae5-488f-bd5d-64399fa73cd1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.944260] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b17fedf1-bd1b-43ba-9818-48a44445fc18 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.951262] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-775c81cf-ea7f-4d3c-b692-98a725875325 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.964154] env[69328]: DEBUG nova.compute.provider_tree [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1331.346501] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c1dc9845-e600-46e1-9e4d-7a93592f71bf tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "55107d36-c16b-43f9-b436-0de8d9dfd0ca" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1331.346885] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c1dc9845-e600-46e1-9e4d-7a93592f71bf tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "55107d36-c16b-43f9-b436-0de8d9dfd0ca" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1331.347174] env[69328]: DEBUG nova.compute.manager [None req-c1dc9845-e600-46e1-9e4d-7a93592f71bf tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1331.348283] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3769e8a-659f-492f-94d7-da5f524df69d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.355108] env[69328]: DEBUG nova.compute.manager [None req-c1dc9845-e600-46e1-9e4d-7a93592f71bf tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69328) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1331.355653] env[69328]: DEBUG nova.objects.instance [None req-c1dc9845-e600-46e1-9e4d-7a93592f71bf tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lazy-loading 'flavor' on Instance uuid 55107d36-c16b-43f9-b436-0de8d9dfd0ca {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1331.467675] env[69328]: DEBUG nova.scheduler.client.report [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1331.976309] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69328) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1331.976886] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.146s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1332.363446] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1dc9845-e600-46e1-9e4d-7a93592f71bf tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1332.363770] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5d432dd0-d8ae-479a-9bc0-d716dbf36655 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.371514] env[69328]: DEBUG oslo_vmware.api [None req-c1dc9845-e600-46e1-9e4d-7a93592f71bf tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1332.371514] env[69328]: value = "task-3274412" [ 1332.371514] env[69328]: _type = "Task" [ 1332.371514] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.379223] env[69328]: DEBUG oslo_vmware.api [None req-c1dc9845-e600-46e1-9e4d-7a93592f71bf tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274412, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.882838] env[69328]: DEBUG oslo_vmware.api [None req-c1dc9845-e600-46e1-9e4d-7a93592f71bf tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274412, 'name': PowerOffVM_Task, 'duration_secs': 0.204215} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.884262] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1dc9845-e600-46e1-9e4d-7a93592f71bf tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1332.884262] env[69328]: DEBUG nova.compute.manager [None req-c1dc9845-e600-46e1-9e4d-7a93592f71bf tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1332.884676] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72f6886d-59a3-4b0c-85d7-5628a1aa2ad0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.398186] env[69328]: DEBUG oslo_concurrency.lockutils [None req-c1dc9845-e600-46e1-9e4d-7a93592f71bf tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "55107d36-c16b-43f9-b436-0de8d9dfd0ca" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.051s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1333.745071] env[69328]: DEBUG nova.objects.instance [None req-44db56d1-ab2a-4e9f-91f5-0668ae7d3f71 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lazy-loading 'flavor' on Instance uuid 55107d36-c16b-43f9-b436-0de8d9dfd0ca {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1334.249705] env[69328]: DEBUG oslo_concurrency.lockutils [None req-44db56d1-ab2a-4e9f-91f5-0668ae7d3f71 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "refresh_cache-55107d36-c16b-43f9-b436-0de8d9dfd0ca" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1334.249886] env[69328]: DEBUG oslo_concurrency.lockutils [None req-44db56d1-ab2a-4e9f-91f5-0668ae7d3f71 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquired lock "refresh_cache-55107d36-c16b-43f9-b436-0de8d9dfd0ca" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1334.250078] env[69328]: DEBUG nova.network.neutron [None req-44db56d1-ab2a-4e9f-91f5-0668ae7d3f71 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1334.250284] env[69328]: DEBUG nova.objects.instance [None req-44db56d1-ab2a-4e9f-91f5-0668ae7d3f71 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lazy-loading 'info_cache' on Instance uuid 55107d36-c16b-43f9-b436-0de8d9dfd0ca {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1334.753565] env[69328]: DEBUG nova.objects.base [None req-44db56d1-ab2a-4e9f-91f5-0668ae7d3f71 tempest-ServerActionsTestJSON-1885345193 
tempest-ServerActionsTestJSON-1885345193-project-member] Object Instance<55107d36-c16b-43f9-b436-0de8d9dfd0ca> lazy-loaded attributes: flavor,info_cache {{(pid=69328) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1335.305040] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7115c1c6-e20b-459b-b5ad-8b258091020c tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Acquiring lock "d19f6a2a-3a16-4031-8c20-143ccfd6f5f5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1335.305282] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7115c1c6-e20b-459b-b5ad-8b258091020c tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lock "d19f6a2a-3a16-4031-8c20-143ccfd6f5f5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1335.305491] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7115c1c6-e20b-459b-b5ad-8b258091020c tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Acquiring lock "d19f6a2a-3a16-4031-8c20-143ccfd6f5f5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1335.305704] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7115c1c6-e20b-459b-b5ad-8b258091020c tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lock "d19f6a2a-3a16-4031-8c20-143ccfd6f5f5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1335.305875] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7115c1c6-e20b-459b-b5ad-8b258091020c tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lock "d19f6a2a-3a16-4031-8c20-143ccfd6f5f5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1335.307915] env[69328]: INFO nova.compute.manager [None req-7115c1c6-e20b-459b-b5ad-8b258091020c tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Terminating instance [ 1335.442045] env[69328]: DEBUG nova.network.neutron [None req-44db56d1-ab2a-4e9f-91f5-0668ae7d3f71 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Updating instance_info_cache with network_info: [{"id": "d61a3758-8f85-4e39-94d0-95fa0087b49c", "address": "fa:16:3e:b9:82:8b", "network": {"id": "4031def8-0553-461f-9528-aa338b9d7b2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1834835647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", 
"type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f357b5a9494b4849a83aa934c5d4e26b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "357d2811-e990-4985-9f9e-b158d10d3699", "external-id": "nsx-vlan-transportzone-641", "segmentation_id": 641, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd61a3758-8f", "ovs_interfaceid": "d61a3758-8f85-4e39-94d0-95fa0087b49c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1335.813509] env[69328]: DEBUG nova.compute.manager [None req-7115c1c6-e20b-459b-b5ad-8b258091020c tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1335.813509] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7115c1c6-e20b-459b-b5ad-8b258091020c tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1335.814419] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73ac7d8e-a9b5-486a-a76b-08aefffc6970 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.822519] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7115c1c6-e20b-459b-b5ad-8b258091020c tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1335.822738] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eec1a901-59f4-4419-bd41-38f01e94af92 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.828883] env[69328]: DEBUG oslo_vmware.api [None req-7115c1c6-e20b-459b-b5ad-8b258091020c tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Waiting for the task: (returnval){ [ 1335.828883] env[69328]: value = "task-3274413" [ 1335.828883] env[69328]: _type = "Task" [ 1335.828883] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.836265] env[69328]: DEBUG oslo_vmware.api [None req-7115c1c6-e20b-459b-b5ad-8b258091020c tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274413, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.945212] env[69328]: DEBUG oslo_concurrency.lockutils [None req-44db56d1-ab2a-4e9f-91f5-0668ae7d3f71 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Releasing lock "refresh_cache-55107d36-c16b-43f9-b436-0de8d9dfd0ca" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1336.339687] env[69328]: DEBUG oslo_vmware.api [None req-7115c1c6-e20b-459b-b5ad-8b258091020c tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274413, 'name': PowerOffVM_Task, 'duration_secs': 0.175121} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.339935] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-7115c1c6-e20b-459b-b5ad-8b258091020c tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1336.340124] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7115c1c6-e20b-459b-b5ad-8b258091020c tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1336.340375] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-01f56c0c-6a1c-4a48-99bc-f14d142b07f0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.404017] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7115c1c6-e20b-459b-b5ad-8b258091020c tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1336.404245] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7115c1c6-e20b-459b-b5ad-8b258091020c tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1336.404429] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-7115c1c6-e20b-459b-b5ad-8b258091020c tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Deleting the datastore file [datastore2] d19f6a2a-3a16-4031-8c20-143ccfd6f5f5 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1336.404705] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fb4db142-479e-4db1-824e-919779379560 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.411273] env[69328]: DEBUG oslo_vmware.api [None req-7115c1c6-e20b-459b-b5ad-8b258091020c tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Waiting for the task: (returnval){ [ 1336.411273] env[69328]: value = "task-3274415" [ 1336.411273] env[69328]: _type = "Task" [ 1336.411273] 
env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1336.419211] env[69328]: DEBUG oslo_vmware.api [None req-7115c1c6-e20b-459b-b5ad-8b258091020c tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274415, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.921663] env[69328]: DEBUG oslo_vmware.api [None req-7115c1c6-e20b-459b-b5ad-8b258091020c tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274415, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146919} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.922050] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-7115c1c6-e20b-459b-b5ad-8b258091020c tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1336.922124] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7115c1c6-e20b-459b-b5ad-8b258091020c tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1336.922313] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-7115c1c6-e20b-459b-b5ad-8b258091020c tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1336.922488] env[69328]: INFO nova.compute.manager [None req-7115c1c6-e20b-459b-b5ad-8b258091020c tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1336.922727] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7115c1c6-e20b-459b-b5ad-8b258091020c tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1336.922918] env[69328]: DEBUG nova.compute.manager [-] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1336.923027] env[69328]: DEBUG nova.network.neutron [-] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1336.950371] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-44db56d1-ab2a-4e9f-91f5-0668ae7d3f71 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1336.950640] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e44d5730-1167-441f-803b-1eaf94e14d9d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.958280] env[69328]: DEBUG oslo_vmware.api [None req-44db56d1-ab2a-4e9f-91f5-0668ae7d3f71 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1336.958280] env[69328]: value = "task-3274416" [ 1336.958280] env[69328]: _type = "Task" [ 1336.958280] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1336.966008] env[69328]: DEBUG oslo_vmware.api [None req-44db56d1-ab2a-4e9f-91f5-0668ae7d3f71 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274416, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.353593] env[69328]: DEBUG nova.compute.manager [req-d6a13b31-c311-49d4-a918-45cd18c2496d req-032bc859-77ba-412a-af70-4d05c8e06ab6 service nova] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Received event network-vif-deleted-69e73394-845a-4108-8b2f-6b23a000d98c {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1337.353749] env[69328]: INFO nova.compute.manager [req-d6a13b31-c311-49d4-a918-45cd18c2496d req-032bc859-77ba-412a-af70-4d05c8e06ab6 service nova] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Neutron deleted interface 69e73394-845a-4108-8b2f-6b23a000d98c; detaching it from the instance and deleting it from the info cache [ 1337.353935] env[69328]: DEBUG nova.network.neutron [req-d6a13b31-c311-49d4-a918-45cd18c2496d req-032bc859-77ba-412a-af70-4d05c8e06ab6 service nova] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1337.467940] env[69328]: DEBUG oslo_vmware.api [None req-44db56d1-ab2a-4e9f-91f5-0668ae7d3f71 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274416, 'name': PowerOnVM_Task, 'duration_secs': 0.436579} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.468237] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-44db56d1-ab2a-4e9f-91f5-0668ae7d3f71 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1337.468446] env[69328]: DEBUG nova.compute.manager [None req-44db56d1-ab2a-4e9f-91f5-0668ae7d3f71 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1337.469243] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dcaf56f-9a54-4fdf-979d-1e20f89b5f04 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.837456] env[69328]: DEBUG nova.network.neutron [-] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1337.856944] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d7bef042-92b2-4b37-996c-5392c387dfdf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.867185] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-698da53c-ceff-4fc5-b2cc-3d263a2d8f5d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.892185] env[69328]: DEBUG nova.compute.manager [req-d6a13b31-c311-49d4-a918-45cd18c2496d req-032bc859-77ba-412a-af70-4d05c8e06ab6 service nova] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Detach interface failed, port_id=69e73394-845a-4108-8b2f-6b23a000d98c, reason: Instance d19f6a2a-3a16-4031-8c20-143ccfd6f5f5 could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1338.340067] env[69328]: INFO nova.compute.manager [-] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Took 1.42 seconds to deallocate network for instance. 
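[editor's note] The entries above show the task-polling pattern the VMware driver relies on: a vCenter task (e.g. task-3274415 DeleteDatastoreFile_Task, task-3274416 PowerOnVM_Task) is submitted, then wait_for_task/_poll_task repeatedly reads its progress until it reports success, which is where the "progress is 0%" and "completed successfully ... duration_secs" lines come from. The sketch below is illustrative only and is not the oslo.vmware implementation; wait_for_vcenter_task and fetch_task_info are hypothetical names standing in for the real PropertyCollector-based lookup.

    # Illustrative polling loop, assuming fetch_task_info(task_id) returns a dict
    # such as {'state': 'running', 'progress': 40} or {'state': 'success'}.
    import time

    def wait_for_vcenter_task(task_id, fetch_task_info, poll_interval=0.5):
        """Poll a task until it completes; raise if it ends in error."""
        start = time.monotonic()
        while True:
            info = fetch_task_info(task_id)          # hypothetical helper
            if info['state'] == 'success':
                info['duration_secs'] = round(time.monotonic() - start, 6)
                return info
            if info['state'] == 'error':
                raise RuntimeError('task %s failed: %s'
                                   % (task_id, info.get('error')))
            # mirrors the "Task: {...} progress is N%" debug lines above
            print('Task %s progress is %d%%' % (task_id, info.get('progress', 0)))
            time.sleep(poll_interval)

In the log above, the delete task finishes after roughly 0.15 s of polling and the power-on task after roughly 0.44 s, matching the reported duration_secs values.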
[ 1338.511646] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c654547-69d2-491f-be05-f0a373326f95 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.518402] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-235323e2-7039-44e8-821a-39261c1e7172 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Suspending the VM {{(pid=69328) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1338.518644] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-fcb2e662-aea6-4e2b-8c40-f1ed2fbe84a6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.525525] env[69328]: DEBUG oslo_vmware.api [None req-235323e2-7039-44e8-821a-39261c1e7172 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1338.525525] env[69328]: value = "task-3274417" [ 1338.525525] env[69328]: _type = "Task" [ 1338.525525] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.535493] env[69328]: DEBUG oslo_vmware.api [None req-235323e2-7039-44e8-821a-39261c1e7172 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274417, 'name': SuspendVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.845691] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7115c1c6-e20b-459b-b5ad-8b258091020c tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1338.846105] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7115c1c6-e20b-459b-b5ad-8b258091020c tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1338.846339] env[69328]: DEBUG nova.objects.instance [None req-7115c1c6-e20b-459b-b5ad-8b258091020c tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lazy-loading 'resources' on Instance uuid d19f6a2a-3a16-4031-8c20-143ccfd6f5f5 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1339.035478] env[69328]: DEBUG oslo_vmware.api [None req-235323e2-7039-44e8-821a-39261c1e7172 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274417, 'name': SuspendVM_Task} progress is 100%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.399424] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60e71d21-363d-4673-8b1a-0080eb303ab5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.407207] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c45a265-2147-4998-b356-f8e7701e7c0a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.435483] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95874347-8520-4fc7-8da1-49dccbbf4dc8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.442207] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17c3c705-870b-48bf-8256-70db59074054 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.455723] env[69328]: DEBUG nova.compute.provider_tree [None req-7115c1c6-e20b-459b-b5ad-8b258091020c tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1339.535337] env[69328]: DEBUG oslo_vmware.api [None req-235323e2-7039-44e8-821a-39261c1e7172 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274417, 'name': SuspendVM_Task, 'duration_secs': 0.532668} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.535580] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-235323e2-7039-44e8-821a-39261c1e7172 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Suspended the VM {{(pid=69328) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1339.535756] env[69328]: DEBUG nova.compute.manager [None req-235323e2-7039-44e8-821a-39261c1e7172 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1339.536486] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f79269ae-ada4-46c0-af9e-ed0082131e1a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.959233] env[69328]: DEBUG nova.scheduler.client.report [None req-7115c1c6-e20b-459b-b5ad-8b258091020c tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1340.463796] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7115c1c6-e20b-459b-b5ad-8b258091020c tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.618s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1340.483058] env[69328]: INFO nova.scheduler.client.report [None req-7115c1c6-e20b-459b-b5ad-8b258091020c tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Deleted allocations for instance d19f6a2a-3a16-4031-8c20-143ccfd6f5f5 [ 1340.817402] env[69328]: INFO nova.compute.manager [None req-36acca2b-c8d5-4170-ae6f-ae2a988a1c22 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Resuming [ 1340.818071] env[69328]: DEBUG nova.objects.instance [None req-36acca2b-c8d5-4170-ae6f-ae2a988a1c22 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lazy-loading 'flavor' on Instance uuid 55107d36-c16b-43f9-b436-0de8d9dfd0ca {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1340.990736] env[69328]: DEBUG oslo_concurrency.lockutils [None req-7115c1c6-e20b-459b-b5ad-8b258091020c tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lock "d19f6a2a-3a16-4031-8c20-143ccfd6f5f5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.685s {{(pid=69328) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1342.327932] env[69328]: DEBUG oslo_concurrency.lockutils [None req-36acca2b-c8d5-4170-ae6f-ae2a988a1c22 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "refresh_cache-55107d36-c16b-43f9-b436-0de8d9dfd0ca" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1342.327932] env[69328]: DEBUG oslo_concurrency.lockutils [None req-36acca2b-c8d5-4170-ae6f-ae2a988a1c22 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquired lock "refresh_cache-55107d36-c16b-43f9-b436-0de8d9dfd0ca" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1342.327932] env[69328]: DEBUG nova.network.neutron [None req-36acca2b-c8d5-4170-ae6f-ae2a988a1c22 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1343.026524] env[69328]: DEBUG nova.network.neutron [None req-36acca2b-c8d5-4170-ae6f-ae2a988a1c22 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Updating instance_info_cache with network_info: [{"id": "d61a3758-8f85-4e39-94d0-95fa0087b49c", "address": "fa:16:3e:b9:82:8b", "network": {"id": "4031def8-0553-461f-9528-aa338b9d7b2f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1834835647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f357b5a9494b4849a83aa934c5d4e26b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "357d2811-e990-4985-9f9e-b158d10d3699", "external-id": "nsx-vlan-transportzone-641", "segmentation_id": 641, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd61a3758-8f", "ovs_interfaceid": "d61a3758-8f85-4e39-94d0-95fa0087b49c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1343.135521] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Acquiring lock "7ee57873-8f9a-4bc6-9b88-261cd6239774" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1343.135759] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lock 
"7ee57873-8f9a-4bc6-9b88-261cd6239774" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1343.530171] env[69328]: DEBUG oslo_concurrency.lockutils [None req-36acca2b-c8d5-4170-ae6f-ae2a988a1c22 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Releasing lock "refresh_cache-55107d36-c16b-43f9-b436-0de8d9dfd0ca" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1343.531218] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9edf389e-d605-4ba4-a944-17eaeac1b11c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.538372] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-36acca2b-c8d5-4170-ae6f-ae2a988a1c22 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Resuming the VM {{(pid=69328) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1343.538588] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9bd76767-bd90-4a4a-a81e-687dedaa5315 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.545837] env[69328]: DEBUG oslo_vmware.api [None req-36acca2b-c8d5-4170-ae6f-ae2a988a1c22 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1343.545837] env[69328]: value = "task-3274418" [ 1343.545837] env[69328]: _type = "Task" [ 1343.545837] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1343.552653] env[69328]: DEBUG oslo_vmware.api [None req-36acca2b-c8d5-4170-ae6f-ae2a988a1c22 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274418, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.640494] env[69328]: DEBUG nova.compute.manager [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Starting instance... {{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1344.056661] env[69328]: DEBUG oslo_vmware.api [None req-36acca2b-c8d5-4170-ae6f-ae2a988a1c22 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274418, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.163495] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1344.163787] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1344.165398] env[69328]: INFO nova.compute.claims [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1344.557818] env[69328]: DEBUG oslo_vmware.api [None req-36acca2b-c8d5-4170-ae6f-ae2a988a1c22 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274418, 'name': PowerOnVM_Task, 'duration_secs': 0.572313} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.558205] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-36acca2b-c8d5-4170-ae6f-ae2a988a1c22 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Resumed the VM {{(pid=69328) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1344.558339] env[69328]: DEBUG nova.compute.manager [None req-36acca2b-c8d5-4170-ae6f-ae2a988a1c22 tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1344.559165] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9715f2a0-ae8a-480d-8af7-9ca6401c980c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.219105] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-647880ab-3592-4416-ad00-99f82c8b7e4c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.227854] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac7c6989-7191-4f70-a0af-2fbcae226881 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.256783] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a0f22e7-1b50-48bf-8abb-0e6b7ebe32a5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.263606] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-dc2a536f-7fb9-4b29-b9bf-cd78ef565710 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.276799] env[69328]: DEBUG nova.compute.provider_tree [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1345.511693] env[69328]: DEBUG oslo_concurrency.lockutils [None req-511341a2-d8ee-45b2-a648-f514dcccd7ee tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "55107d36-c16b-43f9-b436-0de8d9dfd0ca" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1345.511957] env[69328]: DEBUG oslo_concurrency.lockutils [None req-511341a2-d8ee-45b2-a648-f514dcccd7ee tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "55107d36-c16b-43f9-b436-0de8d9dfd0ca" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1345.512181] env[69328]: DEBUG oslo_concurrency.lockutils [None req-511341a2-d8ee-45b2-a648-f514dcccd7ee tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "55107d36-c16b-43f9-b436-0de8d9dfd0ca-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1345.512367] env[69328]: DEBUG oslo_concurrency.lockutils [None req-511341a2-d8ee-45b2-a648-f514dcccd7ee tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "55107d36-c16b-43f9-b436-0de8d9dfd0ca-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1345.512534] env[69328]: DEBUG oslo_concurrency.lockutils [None req-511341a2-d8ee-45b2-a648-f514dcccd7ee tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "55107d36-c16b-43f9-b436-0de8d9dfd0ca-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1345.514730] env[69328]: INFO nova.compute.manager [None req-511341a2-d8ee-45b2-a648-f514dcccd7ee tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Terminating instance [ 1345.780046] env[69328]: DEBUG nova.scheduler.client.report [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 
196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1346.018758] env[69328]: DEBUG nova.compute.manager [None req-511341a2-d8ee-45b2-a648-f514dcccd7ee tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1346.019020] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-511341a2-d8ee-45b2-a648-f514dcccd7ee tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1346.019917] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7569fd3-02b8-4909-bd36-afd60210a706 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.027710] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-511341a2-d8ee-45b2-a648-f514dcccd7ee tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1346.027924] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1d6c602b-b891-4fbb-b997-c24c826ac5bf {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.033945] env[69328]: DEBUG oslo_vmware.api [None req-511341a2-d8ee-45b2-a648-f514dcccd7ee tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1346.033945] env[69328]: value = "task-3274419" [ 1346.033945] env[69328]: _type = "Task" [ 1346.033945] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.040984] env[69328]: DEBUG oslo_vmware.api [None req-511341a2-d8ee-45b2-a648-f514dcccd7ee tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274419, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.285234] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.121s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1346.285743] env[69328]: DEBUG nova.compute.manager [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Start building networks asynchronously for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1346.545183] env[69328]: DEBUG oslo_vmware.api [None req-511341a2-d8ee-45b2-a648-f514dcccd7ee tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274419, 'name': PowerOffVM_Task, 'duration_secs': 0.16124} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.545390] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-511341a2-d8ee-45b2-a648-f514dcccd7ee tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1346.545555] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-511341a2-d8ee-45b2-a648-f514dcccd7ee tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1346.545799] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b8a51329-3846-4db0-85b1-71b4d18527c5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.607189] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-511341a2-d8ee-45b2-a648-f514dcccd7ee tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1346.607507] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-511341a2-d8ee-45b2-a648-f514dcccd7ee tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1346.607774] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-511341a2-d8ee-45b2-a648-f514dcccd7ee tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Deleting the datastore file [datastore1] 55107d36-c16b-43f9-b436-0de8d9dfd0ca {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1346.608150] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2d711591-ceba-4355-a90f-d3940be016ce {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.614821] env[69328]: DEBUG oslo_vmware.api [None req-511341a2-d8ee-45b2-a648-f514dcccd7ee tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for the task: (returnval){ [ 1346.614821] env[69328]: value = "task-3274421" [ 1346.614821] env[69328]: _type = "Task" [ 1346.614821] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.623294] env[69328]: DEBUG oslo_vmware.api [None req-511341a2-d8ee-45b2-a648-f514dcccd7ee tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274421, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.791370] env[69328]: DEBUG nova.compute.utils [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1346.792852] env[69328]: DEBUG nova.compute.manager [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Allocating IP information in the background. {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1346.793085] env[69328]: DEBUG nova.network.neutron [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1346.830252] env[69328]: DEBUG nova.policy [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2be0cfdc98ee4199a8df31f70faa4b49', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '278be2f8452946b9ab9c4bce8f9a7557', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 1347.108620] env[69328]: DEBUG nova.network.neutron [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Successfully created port: e3c363b2-cdba-41b8-b6f5-150b083f2ec8 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1347.125938] env[69328]: DEBUG oslo_vmware.api [None req-511341a2-d8ee-45b2-a648-f514dcccd7ee tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Task: {'id': task-3274421, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134876} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.126207] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-511341a2-d8ee-45b2-a648-f514dcccd7ee tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1347.126392] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-511341a2-d8ee-45b2-a648-f514dcccd7ee tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1347.126567] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-511341a2-d8ee-45b2-a648-f514dcccd7ee tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1347.126771] env[69328]: INFO nova.compute.manager [None req-511341a2-d8ee-45b2-a648-f514dcccd7ee tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1347.127045] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-511341a2-d8ee-45b2-a648-f514dcccd7ee tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1347.127246] env[69328]: DEBUG nova.compute.manager [-] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1347.127342] env[69328]: DEBUG nova.network.neutron [-] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1347.296817] env[69328]: DEBUG nova.compute.manager [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Start building block device mappings for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1347.592321] env[69328]: DEBUG nova.compute.manager [req-b2610650-bfb6-4bfa-be95-8bd1bb312bef req-74581a66-5fc2-4b0e-bfbb-0d539b6935f1 service nova] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Received event network-vif-deleted-d61a3758-8f85-4e39-94d0-95fa0087b49c {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1347.592527] env[69328]: INFO nova.compute.manager [req-b2610650-bfb6-4bfa-be95-8bd1bb312bef req-74581a66-5fc2-4b0e-bfbb-0d539b6935f1 service nova] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Neutron deleted interface d61a3758-8f85-4e39-94d0-95fa0087b49c; detaching it from the instance and deleting it from the info cache [ 1347.592725] env[69328]: DEBUG nova.network.neutron [req-b2610650-bfb6-4bfa-be95-8bd1bb312bef req-74581a66-5fc2-4b0e-bfbb-0d539b6935f1 service nova] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1348.077032] env[69328]: DEBUG nova.network.neutron [-] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1348.095220] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3b0d8660-2e73-41f7-9d4a-e5ddb76efc11 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.106889] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23515eb8-ce15-4a05-b010-a4ce22cb90c0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.131799] env[69328]: DEBUG nova.compute.manager [req-b2610650-bfb6-4bfa-be95-8bd1bb312bef req-74581a66-5fc2-4b0e-bfbb-0d539b6935f1 service nova] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Detach interface failed, port_id=d61a3758-8f85-4e39-94d0-95fa0087b49c, reason: Instance 55107d36-c16b-43f9-b436-0de8d9dfd0ca could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1348.306592] env[69328]: DEBUG nova.compute.manager [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Start spawning the instance on the hypervisor. 
{{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1348.332541] env[69328]: DEBUG nova.virt.hardware [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1348.332795] env[69328]: DEBUG nova.virt.hardware [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1348.332949] env[69328]: DEBUG nova.virt.hardware [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1348.333149] env[69328]: DEBUG nova.virt.hardware [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1348.333298] env[69328]: DEBUG nova.virt.hardware [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1348.333443] env[69328]: DEBUG nova.virt.hardware [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1348.333653] env[69328]: DEBUG nova.virt.hardware [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1348.333814] env[69328]: DEBUG nova.virt.hardware [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1348.333980] env[69328]: DEBUG nova.virt.hardware [None 
req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1348.334156] env[69328]: DEBUG nova.virt.hardware [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1348.334327] env[69328]: DEBUG nova.virt.hardware [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1348.335293] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44f40b7c-2355-4fd9-9c99-e1588f31792d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.343422] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d87ee743-2a78-43b1-92c3-e498f06c81b7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.517730] env[69328]: DEBUG nova.compute.manager [req-0b1caae1-ec54-4daa-8f7f-f12246b5a542 req-05039017-9c0c-4e9d-9c82-a8979d6f5283 service nova] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Received event network-vif-plugged-e3c363b2-cdba-41b8-b6f5-150b083f2ec8 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1348.517852] env[69328]: DEBUG oslo_concurrency.lockutils [req-0b1caae1-ec54-4daa-8f7f-f12246b5a542 req-05039017-9c0c-4e9d-9c82-a8979d6f5283 service nova] Acquiring lock "7ee57873-8f9a-4bc6-9b88-261cd6239774-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1348.518043] env[69328]: DEBUG oslo_concurrency.lockutils [req-0b1caae1-ec54-4daa-8f7f-f12246b5a542 req-05039017-9c0c-4e9d-9c82-a8979d6f5283 service nova] Lock "7ee57873-8f9a-4bc6-9b88-261cd6239774-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1348.518225] env[69328]: DEBUG oslo_concurrency.lockutils [req-0b1caae1-ec54-4daa-8f7f-f12246b5a542 req-05039017-9c0c-4e9d-9c82-a8979d6f5283 service nova] Lock "7ee57873-8f9a-4bc6-9b88-261cd6239774-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1348.518384] env[69328]: DEBUG nova.compute.manager [req-0b1caae1-ec54-4daa-8f7f-f12246b5a542 req-05039017-9c0c-4e9d-9c82-a8979d6f5283 service nova] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] No waiting events found dispatching network-vif-plugged-e3c363b2-cdba-41b8-b6f5-150b083f2ec8 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1348.518547] env[69328]: WARNING nova.compute.manager [req-0b1caae1-ec54-4daa-8f7f-f12246b5a542 
req-05039017-9c0c-4e9d-9c82-a8979d6f5283 service nova] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Received unexpected event network-vif-plugged-e3c363b2-cdba-41b8-b6f5-150b083f2ec8 for instance with vm_state building and task_state spawning. [ 1348.579830] env[69328]: INFO nova.compute.manager [-] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Took 1.45 seconds to deallocate network for instance. [ 1348.599197] env[69328]: DEBUG nova.network.neutron [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Successfully updated port: e3c363b2-cdba-41b8-b6f5-150b083f2ec8 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1349.086708] env[69328]: DEBUG oslo_concurrency.lockutils [None req-511341a2-d8ee-45b2-a648-f514dcccd7ee tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1349.087070] env[69328]: DEBUG oslo_concurrency.lockutils [None req-511341a2-d8ee-45b2-a648-f514dcccd7ee tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1349.087314] env[69328]: DEBUG nova.objects.instance [None req-511341a2-d8ee-45b2-a648-f514dcccd7ee tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lazy-loading 'resources' on Instance uuid 55107d36-c16b-43f9-b436-0de8d9dfd0ca {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1349.101251] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Acquiring lock "refresh_cache-7ee57873-8f9a-4bc6-9b88-261cd6239774" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1349.101379] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Acquired lock "refresh_cache-7ee57873-8f9a-4bc6-9b88-261cd6239774" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1349.101521] env[69328]: DEBUG nova.network.neutron [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1349.632996] env[69328]: DEBUG nova.network.neutron [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1349.641319] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b47ba2d-3738-4c33-9878-13d2005fa637 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.650721] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e521fab1-2754-4eb1-a603-09c60ef949f9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.682240] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b347ac7d-3d8d-4b86-9307-50862eb22b70 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.689255] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be8ffcda-f375-4be9-8c3d-601c88f16fe5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.702847] env[69328]: DEBUG nova.compute.provider_tree [None req-511341a2-d8ee-45b2-a648-f514dcccd7ee tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1349.764900] env[69328]: DEBUG nova.network.neutron [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Updating instance_info_cache with network_info: [{"id": "e3c363b2-cdba-41b8-b6f5-150b083f2ec8", "address": "fa:16:3e:af:11:0e", "network": {"id": "6d64fb31-1957-4722-a4b3-46b946bfb65a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1232247602-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "278be2f8452946b9ab9c4bce8f9a7557", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5bd281ed-ae39-485f-90ee-4ee27994b5b0", "external-id": "nsx-vlan-transportzone-305", "segmentation_id": 305, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3c363b2-cd", "ovs_interfaceid": "e3c363b2-cdba-41b8-b6f5-150b083f2ec8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1350.205940] env[69328]: DEBUG nova.scheduler.client.report [None req-511341a2-d8ee-45b2-a648-f514dcccd7ee tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1350.267522] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Releasing lock "refresh_cache-7ee57873-8f9a-4bc6-9b88-261cd6239774" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1350.267914] env[69328]: DEBUG nova.compute.manager [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Instance network_info: |[{"id": "e3c363b2-cdba-41b8-b6f5-150b083f2ec8", "address": "fa:16:3e:af:11:0e", "network": {"id": "6d64fb31-1957-4722-a4b3-46b946bfb65a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1232247602-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "278be2f8452946b9ab9c4bce8f9a7557", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5bd281ed-ae39-485f-90ee-4ee27994b5b0", "external-id": "nsx-vlan-transportzone-305", "segmentation_id": 305, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3c363b2-cd", "ovs_interfaceid": "e3c363b2-cdba-41b8-b6f5-150b083f2ec8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1350.268354] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:af:11:0e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5bd281ed-ae39-485f-90ee-4ee27994b5b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e3c363b2-cdba-41b8-b6f5-150b083f2ec8', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1350.276379] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1350.276586] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1350.277243] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b4f7a72f-7b6c-4625-8616-8b19d976a602 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.296561] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1350.296561] env[69328]: value = "task-3274422" [ 1350.296561] env[69328]: _type = "Task" [ 1350.296561] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.303878] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274422, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.548589] env[69328]: DEBUG nova.compute.manager [req-caa50b9e-4b4b-4b25-af33-7862dd068907 req-f657f2c7-3cc2-46cf-9617-1c34c99a82d3 service nova] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Received event network-changed-e3c363b2-cdba-41b8-b6f5-150b083f2ec8 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1350.548757] env[69328]: DEBUG nova.compute.manager [req-caa50b9e-4b4b-4b25-af33-7862dd068907 req-f657f2c7-3cc2-46cf-9617-1c34c99a82d3 service nova] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Refreshing instance network info cache due to event network-changed-e3c363b2-cdba-41b8-b6f5-150b083f2ec8. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1350.548974] env[69328]: DEBUG oslo_concurrency.lockutils [req-caa50b9e-4b4b-4b25-af33-7862dd068907 req-f657f2c7-3cc2-46cf-9617-1c34c99a82d3 service nova] Acquiring lock "refresh_cache-7ee57873-8f9a-4bc6-9b88-261cd6239774" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1350.549135] env[69328]: DEBUG oslo_concurrency.lockutils [req-caa50b9e-4b4b-4b25-af33-7862dd068907 req-f657f2c7-3cc2-46cf-9617-1c34c99a82d3 service nova] Acquired lock "refresh_cache-7ee57873-8f9a-4bc6-9b88-261cd6239774" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1350.549298] env[69328]: DEBUG nova.network.neutron [req-caa50b9e-4b4b-4b25-af33-7862dd068907 req-f657f2c7-3cc2-46cf-9617-1c34c99a82d3 service nova] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Refreshing network info cache for port e3c363b2-cdba-41b8-b6f5-150b083f2ec8 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1350.710242] env[69328]: DEBUG oslo_concurrency.lockutils [None req-511341a2-d8ee-45b2-a648-f514dcccd7ee tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.623s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1350.727636] env[69328]: INFO nova.scheduler.client.report [None req-511341a2-d8ee-45b2-a648-f514dcccd7ee tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Deleted allocations for instance 55107d36-c16b-43f9-b436-0de8d9dfd0ca [ 1350.806335] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274422, 'name': CreateVM_Task, 'duration_secs': 0.307534} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.806464] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1350.807127] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1350.807290] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1350.807622] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1350.808120] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74a164d0-5af4-4c33-938b-d07563049603 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.812333] env[69328]: DEBUG oslo_vmware.api [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Waiting for the task: (returnval){ [ 1350.812333] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5271a186-b1aa-efb0-3916-bc0cab512908" [ 1350.812333] env[69328]: _type = "Task" [ 1350.812333] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.819764] env[69328]: DEBUG oslo_vmware.api [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5271a186-b1aa-efb0-3916-bc0cab512908, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.234392] env[69328]: DEBUG oslo_concurrency.lockutils [None req-511341a2-d8ee-45b2-a648-f514dcccd7ee tempest-ServerActionsTestJSON-1885345193 tempest-ServerActionsTestJSON-1885345193-project-member] Lock "55107d36-c16b-43f9-b436-0de8d9dfd0ca" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.722s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1351.237703] env[69328]: DEBUG nova.network.neutron [req-caa50b9e-4b4b-4b25-af33-7862dd068907 req-f657f2c7-3cc2-46cf-9617-1c34c99a82d3 service nova] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Updated VIF entry in instance network info cache for port e3c363b2-cdba-41b8-b6f5-150b083f2ec8. 
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1351.238063] env[69328]: DEBUG nova.network.neutron [req-caa50b9e-4b4b-4b25-af33-7862dd068907 req-f657f2c7-3cc2-46cf-9617-1c34c99a82d3 service nova] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Updating instance_info_cache with network_info: [{"id": "e3c363b2-cdba-41b8-b6f5-150b083f2ec8", "address": "fa:16:3e:af:11:0e", "network": {"id": "6d64fb31-1957-4722-a4b3-46b946bfb65a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1232247602-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "278be2f8452946b9ab9c4bce8f9a7557", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5bd281ed-ae39-485f-90ee-4ee27994b5b0", "external-id": "nsx-vlan-transportzone-305", "segmentation_id": 305, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3c363b2-cd", "ovs_interfaceid": "e3c363b2-cdba-41b8-b6f5-150b083f2ec8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1351.322172] env[69328]: DEBUG oslo_vmware.api [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5271a186-b1aa-efb0-3916-bc0cab512908, 'name': SearchDatastore_Task, 'duration_secs': 0.009717} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.322468] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1351.322700] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1351.322934] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1351.323092] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1351.323272] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1351.323520] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f899e5af-1df5-4eb7-89ae-07cd6ca08816 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.332480] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1351.332611] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1351.333301] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70c0f2b4-3d97-41b0-b799-f344237dfed8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.337723] env[69328]: DEBUG oslo_vmware.api [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Waiting for the task: (returnval){ [ 1351.337723] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5228047f-51c2-1e38-cdd9-88339cb9cfa2" [ 1351.337723] env[69328]: _type = "Task" [ 1351.337723] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.344797] env[69328]: DEBUG oslo_vmware.api [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5228047f-51c2-1e38-cdd9-88339cb9cfa2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.740062] env[69328]: DEBUG oslo_concurrency.lockutils [req-caa50b9e-4b4b-4b25-af33-7862dd068907 req-f657f2c7-3cc2-46cf-9617-1c34c99a82d3 service nova] Releasing lock "refresh_cache-7ee57873-8f9a-4bc6-9b88-261cd6239774" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1351.848210] env[69328]: DEBUG oslo_vmware.api [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5228047f-51c2-1e38-cdd9-88339cb9cfa2, 'name': SearchDatastore_Task, 'duration_secs': 0.008294} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.848948] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3451af1d-1cd1-476d-b35c-368c980af90f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.853763] env[69328]: DEBUG oslo_vmware.api [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Waiting for the task: (returnval){ [ 1351.853763] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]523b767d-a111-0965-010e-8db79b059bbf" [ 1351.853763] env[69328]: _type = "Task" [ 1351.853763] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.862617] env[69328]: DEBUG oslo_vmware.api [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]523b767d-a111-0965-010e-8db79b059bbf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.364529] env[69328]: DEBUG oslo_vmware.api [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]523b767d-a111-0965-010e-8db79b059bbf, 'name': SearchDatastore_Task, 'duration_secs': 0.009693} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.364931] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1352.365314] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 7ee57873-8f9a-4bc6-9b88-261cd6239774/7ee57873-8f9a-4bc6-9b88-261cd6239774.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1352.365660] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c06624d9-536c-4361-b7d6-fb74f1658f86 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.372141] env[69328]: DEBUG oslo_vmware.api [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Waiting for the task: (returnval){ [ 1352.372141] env[69328]: value = "task-3274423" [ 1352.372141] env[69328]: _type = "Task" [ 1352.372141] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.379940] env[69328]: DEBUG oslo_vmware.api [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274423, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.882236] env[69328]: DEBUG oslo_vmware.api [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274423, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.436091} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.882369] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore2] 7ee57873-8f9a-4bc6-9b88-261cd6239774/7ee57873-8f9a-4bc6-9b88-261cd6239774.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1352.882645] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1352.882924] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-775f7e76-272e-4cb2-af22-f651929a2c9c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.889641] env[69328]: DEBUG oslo_vmware.api [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Waiting for the task: (returnval){ [ 1352.889641] env[69328]: value = "task-3274424" [ 1352.889641] env[69328]: _type = "Task" [ 1352.889641] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.897952] env[69328]: DEBUG oslo_vmware.api [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274424, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.399769] env[69328]: DEBUG oslo_vmware.api [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274424, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059439} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.400022] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1353.400750] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba33d2d6-b04a-4c5b-8b85-e75642859d16 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.423122] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Reconfiguring VM instance instance-0000007e to attach disk [datastore2] 7ee57873-8f9a-4bc6-9b88-261cd6239774/7ee57873-8f9a-4bc6-9b88-261cd6239774.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1353.423406] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5bca7ead-1d21-43de-99fe-d5365fa7f433 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.443521] env[69328]: DEBUG oslo_vmware.api [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Waiting for the task: (returnval){ [ 1353.443521] env[69328]: value = "task-3274425" [ 1353.443521] env[69328]: _type = "Task" [ 1353.443521] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.451588] env[69328]: DEBUG oslo_vmware.api [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274425, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.954014] env[69328]: DEBUG oslo_vmware.api [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274425, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.437019] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d8745e37-b8ed-4ca1-9b24-c3908e6c08b9 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "1a0e084a-f7b2-4f2e-b508-33caeed2ffeb" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1354.437019] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d8745e37-b8ed-4ca1-9b24-c3908e6c08b9 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "1a0e084a-f7b2-4f2e-b508-33caeed2ffeb" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1354.454632] env[69328]: DEBUG oslo_vmware.api [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274425, 'name': ReconfigVM_Task, 'duration_secs': 0.761884} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.455032] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Reconfigured VM instance instance-0000007e to attach disk [datastore2] 7ee57873-8f9a-4bc6-9b88-261cd6239774/7ee57873-8f9a-4bc6-9b88-261cd6239774.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1354.455740] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e7bbd814-08ca-4b0d-ac82-bd3fdc469c6c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.462361] env[69328]: DEBUG oslo_vmware.api [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Waiting for the task: (returnval){ [ 1354.462361] env[69328]: value = "task-3274426" [ 1354.462361] env[69328]: _type = "Task" [ 1354.462361] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.469816] env[69328]: DEBUG oslo_vmware.api [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274426, 'name': Rename_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.940480] env[69328]: DEBUG nova.compute.utils [None req-d8745e37-b8ed-4ca1-9b24-c3908e6c08b9 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1354.972536] env[69328]: DEBUG oslo_vmware.api [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274426, 'name': Rename_Task, 'duration_secs': 0.140874} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.972807] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1354.973060] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fc01a99c-e09d-41bc-ac24-5c43d5a32036 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.979759] env[69328]: DEBUG oslo_vmware.api [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Waiting for the task: (returnval){ [ 1354.979759] env[69328]: value = "task-3274427" [ 1354.979759] env[69328]: _type = "Task" [ 1354.979759] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.988482] env[69328]: DEBUG oslo_vmware.api [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274427, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.443586] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d8745e37-b8ed-4ca1-9b24-c3908e6c08b9 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "1a0e084a-f7b2-4f2e-b508-33caeed2ffeb" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1355.489533] env[69328]: DEBUG oslo_vmware.api [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274427, 'name': PowerOnVM_Task, 'duration_secs': 0.46381} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.489782] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1355.489977] env[69328]: INFO nova.compute.manager [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Took 7.18 seconds to spawn the instance on the hypervisor. [ 1355.490172] env[69328]: DEBUG nova.compute.manager [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1355.490931] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-217b019a-c8bd-43af-a4c7-5247eb80cc6b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.008584] env[69328]: INFO nova.compute.manager [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Took 11.86 seconds to build instance. [ 1356.173105] env[69328]: DEBUG nova.compute.manager [req-0a5cf2d4-bc93-4951-b6b8-fbf3db0078c4 req-1de99281-1f74-4a4b-be2e-ce0e8286c1ae service nova] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Received event network-changed-e3c363b2-cdba-41b8-b6f5-150b083f2ec8 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1356.173321] env[69328]: DEBUG nova.compute.manager [req-0a5cf2d4-bc93-4951-b6b8-fbf3db0078c4 req-1de99281-1f74-4a4b-be2e-ce0e8286c1ae service nova] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Refreshing instance network info cache due to event network-changed-e3c363b2-cdba-41b8-b6f5-150b083f2ec8. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1356.173554] env[69328]: DEBUG oslo_concurrency.lockutils [req-0a5cf2d4-bc93-4951-b6b8-fbf3db0078c4 req-1de99281-1f74-4a4b-be2e-ce0e8286c1ae service nova] Acquiring lock "refresh_cache-7ee57873-8f9a-4bc6-9b88-261cd6239774" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1356.173657] env[69328]: DEBUG oslo_concurrency.lockutils [req-0a5cf2d4-bc93-4951-b6b8-fbf3db0078c4 req-1de99281-1f74-4a4b-be2e-ce0e8286c1ae service nova] Acquired lock "refresh_cache-7ee57873-8f9a-4bc6-9b88-261cd6239774" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1356.173819] env[69328]: DEBUG nova.network.neutron [req-0a5cf2d4-bc93-4951-b6b8-fbf3db0078c4 req-1de99281-1f74-4a4b-be2e-ce0e8286c1ae service nova] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Refreshing network info cache for port e3c363b2-cdba-41b8-b6f5-150b083f2ec8 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1356.509128] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d8745e37-b8ed-4ca1-9b24-c3908e6c08b9 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "1a0e084a-f7b2-4f2e-b508-33caeed2ffeb" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1356.509128] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d8745e37-b8ed-4ca1-9b24-c3908e6c08b9 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "1a0e084a-f7b2-4f2e-b508-33caeed2ffeb" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1356.509128] env[69328]: INFO nova.compute.manager [None req-d8745e37-b8ed-4ca1-9b24-c3908e6c08b9 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Attaching volume 61bc6b0d-9eae-421b-ac7c-12ff4c81341d to /dev/sdb [ 1356.510608] env[69328]: DEBUG oslo_concurrency.lockutils [None req-0608503a-c426-4f7b-8b46-060a9168dca9 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lock "7ee57873-8f9a-4bc6-9b88-261cd6239774" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.375s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1356.538470] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93017c9f-34e4-4bf9-b49a-d4168cb661ae {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.545565] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4f4dd2c-2476-4a14-8de3-9d32a89d606d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.559242] env[69328]: DEBUG nova.virt.block_device [None req-d8745e37-b8ed-4ca1-9b24-c3908e6c08b9 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] 
[instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Updating existing volume attachment record: dcb92be2-3375-4d83-bd27-cbad2b2d5896 {{(pid=69328) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1357.121017] env[69328]: DEBUG nova.network.neutron [req-0a5cf2d4-bc93-4951-b6b8-fbf3db0078c4 req-1de99281-1f74-4a4b-be2e-ce0e8286c1ae service nova] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Updated VIF entry in instance network info cache for port e3c363b2-cdba-41b8-b6f5-150b083f2ec8. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1357.121567] env[69328]: DEBUG nova.network.neutron [req-0a5cf2d4-bc93-4951-b6b8-fbf3db0078c4 req-1de99281-1f74-4a4b-be2e-ce0e8286c1ae service nova] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Updating instance_info_cache with network_info: [{"id": "e3c363b2-cdba-41b8-b6f5-150b083f2ec8", "address": "fa:16:3e:af:11:0e", "network": {"id": "6d64fb31-1957-4722-a4b3-46b946bfb65a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1232247602-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.190", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "278be2f8452946b9ab9c4bce8f9a7557", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5bd281ed-ae39-485f-90ee-4ee27994b5b0", "external-id": "nsx-vlan-transportzone-305", "segmentation_id": 305, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3c363b2-cd", "ovs_interfaceid": "e3c363b2-cdba-41b8-b6f5-150b083f2ec8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1357.624438] env[69328]: DEBUG oslo_concurrency.lockutils [req-0a5cf2d4-bc93-4951-b6b8-fbf3db0078c4 req-1de99281-1f74-4a4b-be2e-ce0e8286c1ae service nova] Releasing lock "refresh_cache-7ee57873-8f9a-4bc6-9b88-261cd6239774" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1361.601801] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8745e37-b8ed-4ca1-9b24-c3908e6c08b9 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Volume attach. 
Driver type: vmdk {{(pid=69328) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1361.602034] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8745e37-b8ed-4ca1-9b24-c3908e6c08b9 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653992', 'volume_id': '61bc6b0d-9eae-421b-ac7c-12ff4c81341d', 'name': 'volume-61bc6b0d-9eae-421b-ac7c-12ff4c81341d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1a0e084a-f7b2-4f2e-b508-33caeed2ffeb', 'attached_at': '', 'detached_at': '', 'volume_id': '61bc6b0d-9eae-421b-ac7c-12ff4c81341d', 'serial': '61bc6b0d-9eae-421b-ac7c-12ff4c81341d'} {{(pid=69328) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1361.602913] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bca8778-ffb9-471e-bb8c-209fc8b919bb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.619102] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c1c194a-b962-4acc-8058-e20c163d95ff {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.643814] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8745e37-b8ed-4ca1-9b24-c3908e6c08b9 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Reconfiguring VM instance instance-0000007d to attach disk [datastore1] volume-61bc6b0d-9eae-421b-ac7c-12ff4c81341d/volume-61bc6b0d-9eae-421b-ac7c-12ff4c81341d.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1361.644071] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fc32f768-6736-448b-88b6-408aaeab026c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.661522] env[69328]: DEBUG oslo_vmware.api [None req-d8745e37-b8ed-4ca1-9b24-c3908e6c08b9 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1361.661522] env[69328]: value = "task-3274432" [ 1361.661522] env[69328]: _type = "Task" [ 1361.661522] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.668813] env[69328]: DEBUG oslo_vmware.api [None req-d8745e37-b8ed-4ca1-9b24-c3908e6c08b9 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274432, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.171737] env[69328]: DEBUG oslo_vmware.api [None req-d8745e37-b8ed-4ca1-9b24-c3908e6c08b9 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274432, 'name': ReconfigVM_Task, 'duration_secs': 0.35274} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.172033] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8745e37-b8ed-4ca1-9b24-c3908e6c08b9 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Reconfigured VM instance instance-0000007d to attach disk [datastore1] volume-61bc6b0d-9eae-421b-ac7c-12ff4c81341d/volume-61bc6b0d-9eae-421b-ac7c-12ff4c81341d.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1362.176590] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-15ed2b00-6b40-43b9-91e1-f3d5c1ff9fd4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.191263] env[69328]: DEBUG oslo_vmware.api [None req-d8745e37-b8ed-4ca1-9b24-c3908e6c08b9 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1362.191263] env[69328]: value = "task-3274433" [ 1362.191263] env[69328]: _type = "Task" [ 1362.191263] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.199131] env[69328]: DEBUG oslo_vmware.api [None req-d8745e37-b8ed-4ca1-9b24-c3908e6c08b9 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274433, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.700754] env[69328]: DEBUG oslo_vmware.api [None req-d8745e37-b8ed-4ca1-9b24-c3908e6c08b9 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274433, 'name': ReconfigVM_Task, 'duration_secs': 0.134039} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.701079] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8745e37-b8ed-4ca1-9b24-c3908e6c08b9 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653992', 'volume_id': '61bc6b0d-9eae-421b-ac7c-12ff4c81341d', 'name': 'volume-61bc6b0d-9eae-421b-ac7c-12ff4c81341d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1a0e084a-f7b2-4f2e-b508-33caeed2ffeb', 'attached_at': '', 'detached_at': '', 'volume_id': '61bc6b0d-9eae-421b-ac7c-12ff4c81341d', 'serial': '61bc6b0d-9eae-421b-ac7c-12ff4c81341d'} {{(pid=69328) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1363.737695] env[69328]: DEBUG nova.objects.instance [None req-d8745e37-b8ed-4ca1-9b24-c3908e6c08b9 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lazy-loading 'flavor' on Instance uuid 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1364.242948] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d8745e37-b8ed-4ca1-9b24-c3908e6c08b9 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "1a0e084a-f7b2-4f2e-b508-33caeed2ffeb" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.734s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1365.733101] env[69328]: DEBUG nova.compute.manager [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Stashing vm_state: active {{(pid=69328) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1366.249883] env[69328]: DEBUG oslo_concurrency.lockutils [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1366.250186] env[69328]: DEBUG oslo_concurrency.lockutils [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1366.754766] env[69328]: INFO nova.compute.claims [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1367.261257] env[69328]: INFO nova.compute.resource_tracker [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 
1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Updating resource usage from migration 12b881ee-3cef-4b8d-afcb-663fef68cce6 [ 1367.305147] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-394029e0-c8a4-4636-b84c-f3b9ded6c4ba {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.313084] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3408079-b7ac-4526-8129-2d062d92d8c8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.342280] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca1e9874-7565-46ec-b3f3-99e368677468 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.349077] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74e89f34-39f1-419c-be31-0e24e2bc948e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.361366] env[69328]: DEBUG nova.compute.provider_tree [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1367.864226] env[69328]: DEBUG nova.scheduler.client.report [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1368.369925] env[69328]: DEBUG oslo_concurrency.lockutils [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.120s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1368.370304] env[69328]: INFO nova.compute.manager [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Migrating [ 1368.885223] env[69328]: DEBUG oslo_concurrency.lockutils [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "refresh_cache-1a0e084a-f7b2-4f2e-b508-33caeed2ffeb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1368.885454] env[69328]: DEBUG oslo_concurrency.lockutils [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 
tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquired lock "refresh_cache-1a0e084a-f7b2-4f2e-b508-33caeed2ffeb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1368.885618] env[69328]: DEBUG nova.network.neutron [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1369.589209] env[69328]: DEBUG nova.network.neutron [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Updating instance_info_cache with network_info: [{"id": "096b3ef6-28d7-4463-a0b2-884dd086287d", "address": "fa:16:3e:6a:a4:57", "network": {"id": "238bfce5-9117-4d8e-8dd7-445d8d894599", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-231958017-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8bbb75992830459c85c818e850261c61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3d7e184-c87f-47a5-8d0d-9fa20e07e669", "external-id": "nsx-vlan-transportzone-746", "segmentation_id": 746, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap096b3ef6-28", "ovs_interfaceid": "096b3ef6-28d7-4463-a0b2-884dd086287d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1370.091721] env[69328]: DEBUG oslo_concurrency.lockutils [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Releasing lock "refresh_cache-1a0e084a-f7b2-4f2e-b508-33caeed2ffeb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1371.607425] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9274b26-6b7c-4600-b3f4-434a8dd440c7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.628336] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Updating instance '1a0e084a-f7b2-4f2e-b508-33caeed2ffeb' progress to 0 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1372.134318] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 
tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1372.134603] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ec2b15c1-fffa-4ae5-ad82-713589ac9bd4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.141446] env[69328]: DEBUG oslo_vmware.api [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1372.141446] env[69328]: value = "task-3274434" [ 1372.141446] env[69328]: _type = "Task" [ 1372.141446] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1372.149895] env[69328]: DEBUG oslo_vmware.api [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274434, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.651228] env[69328]: DEBUG oslo_vmware.api [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274434, 'name': PowerOffVM_Task, 'duration_secs': 0.171498} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1372.651536] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1372.651666] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Updating instance '1a0e084a-f7b2-4f2e-b508-33caeed2ffeb' progress to 17 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1373.158278] env[69328]: DEBUG nova.virt.hardware [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:34:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1373.158553] env[69328]: DEBUG nova.virt.hardware [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 
tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1373.158718] env[69328]: DEBUG nova.virt.hardware [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1373.158928] env[69328]: DEBUG nova.virt.hardware [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1373.159091] env[69328]: DEBUG nova.virt.hardware [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1373.159247] env[69328]: DEBUG nova.virt.hardware [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1373.159468] env[69328]: DEBUG nova.virt.hardware [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1373.159633] env[69328]: DEBUG nova.virt.hardware [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1373.159799] env[69328]: DEBUG nova.virt.hardware [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1373.159973] env[69328]: DEBUG nova.virt.hardware [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1373.160197] env[69328]: DEBUG nova.virt.hardware [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1373.166792] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-900618d0-3181-4ed0-ad05-9823c2d0f53d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1373.189019] env[69328]: DEBUG oslo_vmware.api [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1373.189019] env[69328]: value = "task-3274435" [ 1373.189019] env[69328]: _type = "Task" [ 1373.189019] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.197977] env[69328]: DEBUG oslo_vmware.api [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274435, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.698888] env[69328]: DEBUG oslo_vmware.api [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274435, 'name': ReconfigVM_Task, 'duration_secs': 0.1913} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1373.699253] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Updating instance '1a0e084a-f7b2-4f2e-b508-33caeed2ffeb' progress to 33 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1374.205659] env[69328]: DEBUG nova.virt.hardware [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1374.205938] env[69328]: DEBUG nova.virt.hardware [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1374.206051] env[69328]: DEBUG nova.virt.hardware [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1374.206223] env[69328]: DEBUG nova.virt.hardware [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1374.206369] env[69328]: DEBUG nova.virt.hardware [None 
req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1374.206516] env[69328]: DEBUG nova.virt.hardware [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1374.206718] env[69328]: DEBUG nova.virt.hardware [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1374.206876] env[69328]: DEBUG nova.virt.hardware [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1374.207113] env[69328]: DEBUG nova.virt.hardware [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1374.207285] env[69328]: DEBUG nova.virt.hardware [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1374.207460] env[69328]: DEBUG nova.virt.hardware [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1374.212733] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Reconfiguring VM instance instance-0000007d to detach disk 2000 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1374.213026] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-51a9c686-f716-41cc-b872-dacdada49f5a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.232264] env[69328]: DEBUG oslo_vmware.api [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1374.232264] env[69328]: value = "task-3274436" [ 1374.232264] env[69328]: _type = "Task" [ 1374.232264] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.241161] env[69328]: DEBUG oslo_vmware.api [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274436, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.742401] env[69328]: DEBUG oslo_vmware.api [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274436, 'name': ReconfigVM_Task, 'duration_secs': 0.237098} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1374.742670] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Reconfigured VM instance instance-0000007d to detach disk 2000 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1374.743436] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7accb04f-6b3b-4d4e-b727-e39d0f570d89 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.766807] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Reconfiguring VM instance instance-0000007d to attach disk [datastore1] 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb/1a0e084a-f7b2-4f2e-b508-33caeed2ffeb.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1374.767369] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aeb0e673-3025-4a6b-963b-a7e67d7f9923 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.783741] env[69328]: DEBUG oslo_vmware.api [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1374.783741] env[69328]: value = "task-3274437" [ 1374.783741] env[69328]: _type = "Task" [ 1374.783741] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.790950] env[69328]: DEBUG oslo_vmware.api [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274437, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.293792] env[69328]: DEBUG oslo_vmware.api [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274437, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.794653] env[69328]: DEBUG oslo_vmware.api [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274437, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.295072] env[69328]: DEBUG oslo_vmware.api [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274437, 'name': ReconfigVM_Task, 'duration_secs': 1.255765} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.295355] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Reconfigured VM instance instance-0000007d to attach disk [datastore1] 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb/1a0e084a-f7b2-4f2e-b508-33caeed2ffeb.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1376.295634] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Updating instance '1a0e084a-f7b2-4f2e-b508-33caeed2ffeb' progress to 50 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1376.802535] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aee2e55a-3b24-4d2f-90a1-dbde6dd3ca05 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.823347] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02b26e6d-79b8-40bb-bfd0-48507c96c4e3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.844049] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Updating instance '1a0e084a-f7b2-4f2e-b508-33caeed2ffeb' progress to 67 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1378.477908] env[69328]: DEBUG nova.network.neutron [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Port 096b3ef6-28d7-4463-a0b2-884dd086287d binding to destination host cpu-1 is already ACTIVE {{(pid=69328) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1379.499902] env[69328]: DEBUG oslo_concurrency.lockutils [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "1a0e084a-f7b2-4f2e-b508-33caeed2ffeb-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1379.500287] env[69328]: DEBUG oslo_concurrency.lockutils [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "1a0e084a-f7b2-4f2e-b508-33caeed2ffeb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1379.500334] env[69328]: DEBUG oslo_concurrency.lockutils [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "1a0e084a-f7b2-4f2e-b508-33caeed2ffeb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1380.533751] env[69328]: DEBUG oslo_concurrency.lockutils [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "refresh_cache-1a0e084a-f7b2-4f2e-b508-33caeed2ffeb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1380.534045] env[69328]: DEBUG oslo_concurrency.lockutils [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquired lock "refresh_cache-1a0e084a-f7b2-4f2e-b508-33caeed2ffeb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1380.534186] env[69328]: DEBUG nova.network.neutron [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1381.238417] env[69328]: DEBUG nova.network.neutron [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Updating instance_info_cache with network_info: [{"id": "096b3ef6-28d7-4463-a0b2-884dd086287d", "address": "fa:16:3e:6a:a4:57", "network": {"id": "238bfce5-9117-4d8e-8dd7-445d8d894599", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-231958017-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8bbb75992830459c85c818e850261c61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3d7e184-c87f-47a5-8d0d-9fa20e07e669", "external-id": "nsx-vlan-transportzone-746", "segmentation_id": 746, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap096b3ef6-28", "ovs_interfaceid": "096b3ef6-28d7-4463-a0b2-884dd086287d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1381.741632] env[69328]: DEBUG oslo_concurrency.lockutils [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Releasing lock "refresh_cache-1a0e084a-f7b2-4f2e-b508-33caeed2ffeb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1382.251514] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9304ee91-926f-4882-9199-f70b99e2e9c1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.258425] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db39edd7-1134-4ca4-9c54-b76b0f2302ce {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.337876] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1383.340462] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1383.355101] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0842a24-16ff-43de-84f8-5cdf3092697a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.376878] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c8f4eef-dd69-4dd2-9bb4-75834e485010 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.383712] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Updating instance '1a0e084a-f7b2-4f2e-b508-33caeed2ffeb' progress to 83 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1383.845516] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1383.845821] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1383.846082] env[69328]: DEBUG oslo_service.periodic_task [None 
req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1383.846340] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1383.846684] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1383.846814] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1383.847057] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69328) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1383.847332] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager.update_available_resource {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1383.889574] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1383.889850] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-716d6e4e-f7f3-45d7-8958-dd60c5d613ee {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.898120] env[69328]: DEBUG oslo_vmware.api [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1383.898120] env[69328]: value = "task-3274438" [ 1383.898120] env[69328]: _type = "Task" [ 1383.898120] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.905451] env[69328]: DEBUG oslo_vmware.api [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274438, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.350664] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1384.350958] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1384.351131] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1384.351273] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69328) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1384.352180] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e721912a-0941-4121-8f78-3b366e52b10f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.360435] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5f7b293-5a38-4fea-ba3a-5e591352003a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.373611] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7f56c90-6e08-4870-a3b8-4ef931c1ac2c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.379913] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d0aa0a0-3924-4adb-ab23-c527007aa7b0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.410256] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180625MB free_disk=116GB free_vcpus=48 pci_devices=None {{(pid=69328) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1384.410369] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1384.410754] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69328) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1384.420206] env[69328]: DEBUG oslo_vmware.api [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274438, 'name': PowerOnVM_Task, 'duration_secs': 0.421776} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.420753] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1384.420753] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-18f4d5c3-3e66-46f9-a8e3-e5af117e7c24 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Updating instance '1a0e084a-f7b2-4f2e-b508-33caeed2ffeb' progress to 100 {{(pid=69328) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1385.421336] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Applying migration context for instance 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb as it has an incoming, in-progress migration 12b881ee-3cef-4b8d-afcb-663fef68cce6. Migration status is post-migrating {{(pid=69328) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1385.422023] env[69328]: INFO nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Updating resource usage from migration 12b881ee-3cef-4b8d-afcb-663fef68cce6 [ 1385.438432] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 7ee57873-8f9a-4bc6-9b88-261cd6239774 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1385.438584] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Migration 12b881ee-3cef-4b8d-afcb-663fef68cce6 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1385.438709] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1385.438882] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=69328) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1385.439030] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1152MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=69328) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1385.485576] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d03b8a72-34b7-4619-9612-b7cb77c42c59 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.493280] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-092b03af-efee-40b1-8883-f08e90fe6b0f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.524360] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f26fbaac-5b78-48aa-85ad-aab402182c5a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.531655] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fed69a64-1b2e-45e1-8f4f-37e7f67a0662 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.544726] env[69328]: DEBUG nova.compute.provider_tree [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1386.047913] env[69328]: DEBUG nova.scheduler.client.report [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1386.552018] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69328) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1386.552309] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.142s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1387.179187] env[69328]: DEBUG nova.network.neutron [None 
req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Port 096b3ef6-28d7-4463-a0b2-884dd086287d binding to destination host cpu-1 is already ACTIVE {{(pid=69328) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1387.179480] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "refresh_cache-1a0e084a-f7b2-4f2e-b508-33caeed2ffeb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1387.179632] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquired lock "refresh_cache-1a0e084a-f7b2-4f2e-b508-33caeed2ffeb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1387.179798] env[69328]: DEBUG nova.network.neutron [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1387.882939] env[69328]: DEBUG nova.network.neutron [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Updating instance_info_cache with network_info: [{"id": "096b3ef6-28d7-4463-a0b2-884dd086287d", "address": "fa:16:3e:6a:a4:57", "network": {"id": "238bfce5-9117-4d8e-8dd7-445d8d894599", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-231958017-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8bbb75992830459c85c818e850261c61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3d7e184-c87f-47a5-8d0d-9fa20e07e669", "external-id": "nsx-vlan-transportzone-746", "segmentation_id": 746, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap096b3ef6-28", "ovs_interfaceid": "096b3ef6-28d7-4463-a0b2-884dd086287d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1388.385559] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Releasing lock "refresh_cache-1a0e084a-f7b2-4f2e-b508-33caeed2ffeb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 
1388.889044] env[69328]: DEBUG nova.compute.manager [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=69328) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1389.994095] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1389.994095] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1390.496481] env[69328]: DEBUG nova.objects.instance [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lazy-loading 'migration_context' on Instance uuid 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1391.050201] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51210c32-acb0-46cc-b718-423ab31c4a1c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.057678] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3191f5cd-701a-48d3-a16e-90a997f3f150 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.087200] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cfb0251-2289-4e8b-ba0b-7b21b3e9ac83 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.094408] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09843b91-daa6-40f6-99a2-31953f428c6a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.107013] env[69328]: DEBUG nova.compute.provider_tree [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1391.610066] env[69328]: DEBUG nova.scheduler.client.report [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1392.623115] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.629s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1394.157108] env[69328]: INFO nova.compute.manager [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Swapping old allocation on dict_keys(['149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e']) held by migration 12b881ee-3cef-4b8d-afcb-663fef68cce6 for instance [ 1394.177403] env[69328]: DEBUG nova.scheduler.client.report [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Overwriting current allocation {'allocations': {'149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 181}}, 'project_id': '8bbb75992830459c85c818e850261c61', 'user_id': 'a07713f537e84711bc559a085d1e05f1', 'consumer_generation': 1} on consumer 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb {{(pid=69328) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1394.259215] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "refresh_cache-1a0e084a-f7b2-4f2e-b508-33caeed2ffeb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1394.259304] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquired lock "refresh_cache-1a0e084a-f7b2-4f2e-b508-33caeed2ffeb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1394.259479] env[69328]: DEBUG nova.network.neutron [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1394.858224] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8fc4ae24-9077-49f6-8198-db6a714699be tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Acquiring lock "7ee57873-8f9a-4bc6-9b88-261cd6239774" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1394.858476] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8fc4ae24-9077-49f6-8198-db6a714699be 
tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lock "7ee57873-8f9a-4bc6-9b88-261cd6239774" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1394.964491] env[69328]: DEBUG nova.network.neutron [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Updating instance_info_cache with network_info: [{"id": "096b3ef6-28d7-4463-a0b2-884dd086287d", "address": "fa:16:3e:6a:a4:57", "network": {"id": "238bfce5-9117-4d8e-8dd7-445d8d894599", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-231958017-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8bbb75992830459c85c818e850261c61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3d7e184-c87f-47a5-8d0d-9fa20e07e669", "external-id": "nsx-vlan-transportzone-746", "segmentation_id": 746, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap096b3ef6-28", "ovs_interfaceid": "096b3ef6-28d7-4463-a0b2-884dd086287d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1395.361891] env[69328]: DEBUG nova.compute.utils [None req-8fc4ae24-9077-49f6-8198-db6a714699be tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1395.467614] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Releasing lock "refresh_cache-1a0e084a-f7b2-4f2e-b508-33caeed2ffeb" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1395.468690] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58e77f10-7418-441b-ab35-fe28fb4bb880 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.476083] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68d939cd-08ed-4175-9c60-8445186b9da7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.865574] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8fc4ae24-9077-49f6-8198-db6a714699be tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lock "7ee57873-8f9a-4bc6-9b88-261cd6239774" "released" by 
"nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1396.550978] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1396.550978] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e4728889-2a66-4444-8158-fe451cdb0dd0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.558010] env[69328]: DEBUG oslo_vmware.api [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1396.558010] env[69328]: value = "task-3274439" [ 1396.558010] env[69328]: _type = "Task" [ 1396.558010] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.566373] env[69328]: DEBUG oslo_vmware.api [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274439, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.931191] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8fc4ae24-9077-49f6-8198-db6a714699be tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Acquiring lock "7ee57873-8f9a-4bc6-9b88-261cd6239774" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1396.931442] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8fc4ae24-9077-49f6-8198-db6a714699be tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lock "7ee57873-8f9a-4bc6-9b88-261cd6239774" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1396.931676] env[69328]: INFO nova.compute.manager [None req-8fc4ae24-9077-49f6-8198-db6a714699be tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Attaching volume 2a193f12-5510-489f-b53b-c8a4f6a0a028 to /dev/sdb [ 1396.961603] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-074108bd-717c-4ce4-82ba-c958b1d64e0d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.968853] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e86d6f71-3fa3-4e85-83bd-b04c358e2ab9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.981880] env[69328]: DEBUG nova.virt.block_device [None req-8fc4ae24-9077-49f6-8198-db6a714699be tempest-AttachVolumeTestJSON-860956265 
tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Updating existing volume attachment record: add9c459-5651-4364-9835-dfd0537a283d {{(pid=69328) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1397.067143] env[69328]: DEBUG oslo_vmware.api [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274439, 'name': PowerOffVM_Task, 'duration_secs': 0.251011} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.067493] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1397.068251] env[69328]: DEBUG nova.virt.hardware [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1397.068571] env[69328]: DEBUG nova.virt.hardware [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1397.068768] env[69328]: DEBUG nova.virt.hardware [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1397.069035] env[69328]: DEBUG nova.virt.hardware [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1397.069253] env[69328]: DEBUG nova.virt.hardware [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1397.069467] env[69328]: DEBUG nova.virt.hardware [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} 
[ 1397.069746] env[69328]: DEBUG nova.virt.hardware [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1397.069967] env[69328]: DEBUG nova.virt.hardware [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1397.070210] env[69328]: DEBUG nova.virt.hardware [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1397.070440] env[69328]: DEBUG nova.virt.hardware [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1397.070678] env[69328]: DEBUG nova.virt.hardware [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1397.075566] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-43002b58-5d5c-40bf-b2bc-e5100cff6b9a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.090484] env[69328]: DEBUG oslo_vmware.api [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1397.090484] env[69328]: value = "task-3274440" [ 1397.090484] env[69328]: _type = "Task" [ 1397.090484] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.098058] env[69328]: DEBUG oslo_vmware.api [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274440, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.600311] env[69328]: DEBUG oslo_vmware.api [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274440, 'name': ReconfigVM_Task, 'duration_secs': 0.137636} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.601080] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5631336-f7e4-4c44-936c-2f7be69b7b39 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.621039] env[69328]: DEBUG nova.virt.hardware [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1397.621273] env[69328]: DEBUG nova.virt.hardware [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1397.621434] env[69328]: DEBUG nova.virt.hardware [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1397.621617] env[69328]: DEBUG nova.virt.hardware [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1397.621761] env[69328]: DEBUG nova.virt.hardware [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1397.621906] env[69328]: DEBUG nova.virt.hardware [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1397.622119] env[69328]: DEBUG nova.virt.hardware [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1397.622291] env[69328]: DEBUG nova.virt.hardware [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Build topologies for 1 vcpu(s) 
1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1397.622457] env[69328]: DEBUG nova.virt.hardware [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1397.622616] env[69328]: DEBUG nova.virt.hardware [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1397.622783] env[69328]: DEBUG nova.virt.hardware [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1397.623540] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dfe6ac3c-4c51-4709-aa2e-dade31810d4d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.628451] env[69328]: DEBUG oslo_vmware.api [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1397.628451] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a13e65-f8e0-981d-eae3-3e5b44a25fb5" [ 1397.628451] env[69328]: _type = "Task" [ 1397.628451] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.635768] env[69328]: DEBUG oslo_vmware.api [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a13e65-f8e0-981d-eae3-3e5b44a25fb5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.138131] env[69328]: DEBUG oslo_vmware.api [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52a13e65-f8e0-981d-eae3-3e5b44a25fb5, 'name': SearchDatastore_Task, 'duration_secs': 0.007509} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.143343] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Reconfiguring VM instance instance-0000007d to detach disk 2000 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1398.143600] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db0658d2-7692-4b49-a996-953a7383958e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.160928] env[69328]: DEBUG oslo_vmware.api [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1398.160928] env[69328]: value = "task-3274442" [ 1398.160928] env[69328]: _type = "Task" [ 1398.160928] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.168503] env[69328]: DEBUG oslo_vmware.api [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274442, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.673243] env[69328]: DEBUG oslo_vmware.api [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274442, 'name': ReconfigVM_Task, 'duration_secs': 0.206811} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.673243] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Reconfigured VM instance instance-0000007d to detach disk 2000 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1398.673716] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38558936-bb5f-441f-84fa-b2db041a2a78 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.697021] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Reconfiguring VM instance instance-0000007d to attach disk [datastore1] 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb/1a0e084a-f7b2-4f2e-b508-33caeed2ffeb.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1398.697337] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-52344f6d-63f9-4703-86d1-a64f2e909b2e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.714754] env[69328]: DEBUG oslo_vmware.api [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1398.714754] env[69328]: value = "task-3274443" [ 1398.714754] env[69328]: _type = "Task" [ 1398.714754] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.721991] env[69328]: DEBUG oslo_vmware.api [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274443, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.224247] env[69328]: DEBUG oslo_vmware.api [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274443, 'name': ReconfigVM_Task, 'duration_secs': 0.270142} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.224522] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Reconfigured VM instance instance-0000007d to attach disk [datastore1] 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb/1a0e084a-f7b2-4f2e-b508-33caeed2ffeb.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1399.225339] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ac371c3-8945-49ed-addc-72e7be3c433a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.244581] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac3a89cf-5160-4afb-802b-19f91178dc19 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.263881] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe0b5ea7-6fff-4576-8ea0-8128ba228676 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.282942] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17085654-fc9c-4f23-8f78-c0f8fe15b29e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.289043] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1399.289255] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9f612a80-9e29-4688-ba4b-cd96dd80c202 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.294737] env[69328]: DEBUG oslo_vmware.api [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1399.294737] env[69328]: value = "task-3274445" [ 1399.294737] env[69328]: _type = "Task" [ 1399.294737] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.301311] env[69328]: DEBUG oslo_vmware.api [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274445, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.804845] env[69328]: DEBUG oslo_vmware.api [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274445, 'name': PowerOnVM_Task, 'duration_secs': 0.36541} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.805232] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1400.846627] env[69328]: INFO nova.compute.manager [None req-d1a6bf1f-934b-46b3-93b4-d4fba30dbd86 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Updating instance to original state: 'active' [ 1401.524680] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8fc4ae24-9077-49f6-8198-db6a714699be tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Volume attach. Driver type: vmdk {{(pid=69328) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1401.524924] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8fc4ae24-9077-49f6-8198-db6a714699be tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653993', 'volume_id': '2a193f12-5510-489f-b53b-c8a4f6a0a028', 'name': 'volume-2a193f12-5510-489f-b53b-c8a4f6a0a028', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7ee57873-8f9a-4bc6-9b88-261cd6239774', 'attached_at': '', 'detached_at': '', 'volume_id': '2a193f12-5510-489f-b53b-c8a4f6a0a028', 'serial': '2a193f12-5510-489f-b53b-c8a4f6a0a028'} {{(pid=69328) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1401.525865] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49e40104-81a7-4f63-9586-6b95516ef446 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.541907] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-699b81e3-2c81-4ff2-8d98-ffc0b2753b55 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.565722] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8fc4ae24-9077-49f6-8198-db6a714699be tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Reconfiguring VM instance instance-0000007e to attach disk [datastore1] volume-2a193f12-5510-489f-b53b-c8a4f6a0a028/volume-2a193f12-5510-489f-b53b-c8a4f6a0a028.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1401.565966] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c578b01d-be15-4071-a773-e2f8ff05eec3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.583402] env[69328]: DEBUG oslo_vmware.api [None req-8fc4ae24-9077-49f6-8198-db6a714699be tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Waiting for 
the task: (returnval){ [ 1401.583402] env[69328]: value = "task-3274446" [ 1401.583402] env[69328]: _type = "Task" [ 1401.583402] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.590916] env[69328]: DEBUG oslo_vmware.api [None req-8fc4ae24-9077-49f6-8198-db6a714699be tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274446, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.763758] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "1a0e084a-f7b2-4f2e-b508-33caeed2ffeb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1401.764011] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "1a0e084a-f7b2-4f2e-b508-33caeed2ffeb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1401.764267] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "1a0e084a-f7b2-4f2e-b508-33caeed2ffeb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1401.764466] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "1a0e084a-f7b2-4f2e-b508-33caeed2ffeb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1401.764637] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "1a0e084a-f7b2-4f2e-b508-33caeed2ffeb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1401.766868] env[69328]: INFO nova.compute.manager [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Terminating instance [ 1402.094176] env[69328]: DEBUG oslo_vmware.api [None req-8fc4ae24-9077-49f6-8198-db6a714699be tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274446, 'name': ReconfigVM_Task, 'duration_secs': 0.400522} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.094552] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8fc4ae24-9077-49f6-8198-db6a714699be tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Reconfigured VM instance instance-0000007e to attach disk [datastore1] volume-2a193f12-5510-489f-b53b-c8a4f6a0a028/volume-2a193f12-5510-489f-b53b-c8a4f6a0a028.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1402.099230] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-446f81fe-718e-47e6-8679-12b2233741c3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.113178] env[69328]: DEBUG oslo_vmware.api [None req-8fc4ae24-9077-49f6-8198-db6a714699be tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Waiting for the task: (returnval){ [ 1402.113178] env[69328]: value = "task-3274447" [ 1402.113178] env[69328]: _type = "Task" [ 1402.113178] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.120717] env[69328]: DEBUG oslo_vmware.api [None req-8fc4ae24-9077-49f6-8198-db6a714699be tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274447, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.271495] env[69328]: DEBUG nova.compute.manager [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1402.271740] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1402.272096] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b04a77cb-6ba0-41c5-9b99-61d2adbe8aae {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.282621] env[69328]: DEBUG oslo_vmware.api [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1402.282621] env[69328]: value = "task-3274448" [ 1402.282621] env[69328]: _type = "Task" [ 1402.282621] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.291707] env[69328]: DEBUG oslo_vmware.api [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274448, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.623137] env[69328]: DEBUG oslo_vmware.api [None req-8fc4ae24-9077-49f6-8198-db6a714699be tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274447, 'name': ReconfigVM_Task, 'duration_secs': 0.182939} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.623514] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8fc4ae24-9077-49f6-8198-db6a714699be tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653993', 'volume_id': '2a193f12-5510-489f-b53b-c8a4f6a0a028', 'name': 'volume-2a193f12-5510-489f-b53b-c8a4f6a0a028', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7ee57873-8f9a-4bc6-9b88-261cd6239774', 'attached_at': '', 'detached_at': '', 'volume_id': '2a193f12-5510-489f-b53b-c8a4f6a0a028', 'serial': '2a193f12-5510-489f-b53b-c8a4f6a0a028'} {{(pid=69328) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1402.792270] env[69328]: DEBUG oslo_vmware.api [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274448, 'name': PowerOffVM_Task, 'duration_secs': 0.189256} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.792538] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1402.792733] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Volume detach. 
Driver type: vmdk {{(pid=69328) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1402.792921] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653992', 'volume_id': '61bc6b0d-9eae-421b-ac7c-12ff4c81341d', 'name': 'volume-61bc6b0d-9eae-421b-ac7c-12ff4c81341d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '1a0e084a-f7b2-4f2e-b508-33caeed2ffeb', 'attached_at': '2025-04-03T17:49:14.000000', 'detached_at': '', 'volume_id': '61bc6b0d-9eae-421b-ac7c-12ff4c81341d', 'serial': '61bc6b0d-9eae-421b-ac7c-12ff4c81341d'} {{(pid=69328) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1402.793694] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74525bb8-51b9-4b9d-b98a-636970f4203d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.813111] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daed3b7a-d41d-4d7a-8d96-c560954ff7e4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.819221] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb653320-aa8e-4d7e-a7fd-aaa8c491aa17 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.838283] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-444b7b56-a945-45ed-b15c-73be53c18ccd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.852066] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] The volume has not been displaced from its original location: [datastore1] volume-61bc6b0d-9eae-421b-ac7c-12ff4c81341d/volume-61bc6b0d-9eae-421b-ac7c-12ff4c81341d.vmdk. No consolidation needed. 
{{(pid=69328) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1402.857213] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Reconfiguring VM instance instance-0000007d to detach disk 2001 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1402.857464] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1aed1198-4971-4465-ad96-5a624f4fbe33 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.874977] env[69328]: DEBUG oslo_vmware.api [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1402.874977] env[69328]: value = "task-3274449" [ 1402.874977] env[69328]: _type = "Task" [ 1402.874977] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.883571] env[69328]: DEBUG oslo_vmware.api [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274449, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.384816] env[69328]: DEBUG oslo_vmware.api [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274449, 'name': ReconfigVM_Task, 'duration_secs': 0.229078} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.385190] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Reconfigured VM instance instance-0000007d to detach disk 2001 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1403.389707] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-974a9e0a-0d14-4f2b-9a30-37c4c5a5f1fc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.404174] env[69328]: DEBUG oslo_vmware.api [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1403.404174] env[69328]: value = "task-3274450" [ 1403.404174] env[69328]: _type = "Task" [ 1403.404174] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.411493] env[69328]: DEBUG oslo_vmware.api [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274450, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.657333] env[69328]: DEBUG nova.objects.instance [None req-8fc4ae24-9077-49f6-8198-db6a714699be tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lazy-loading 'flavor' on Instance uuid 7ee57873-8f9a-4bc6-9b88-261cd6239774 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1403.913740] env[69328]: DEBUG oslo_vmware.api [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274450, 'name': ReconfigVM_Task, 'duration_secs': 0.181267} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.914100] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653992', 'volume_id': '61bc6b0d-9eae-421b-ac7c-12ff4c81341d', 'name': 'volume-61bc6b0d-9eae-421b-ac7c-12ff4c81341d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '1a0e084a-f7b2-4f2e-b508-33caeed2ffeb', 'attached_at': '2025-04-03T17:49:14.000000', 'detached_at': '', 'volume_id': '61bc6b0d-9eae-421b-ac7c-12ff4c81341d', 'serial': '61bc6b0d-9eae-421b-ac7c-12ff4c81341d'} {{(pid=69328) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1403.914452] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1403.915228] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b4cdd88-70a7-4dd2-94b1-2578f8d36a7c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.921771] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1403.922016] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-30b39143-1eb0-4375-b70f-df1273d3273c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.987218] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1403.987604] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 
tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1403.987896] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Deleting the datastore file [datastore1] 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1403.988285] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e37e6614-afe6-47a8-9a43-4d3cfac293c3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.995987] env[69328]: DEBUG oslo_vmware.api [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1403.995987] env[69328]: value = "task-3274452" [ 1403.995987] env[69328]: _type = "Task" [ 1403.995987] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.004823] env[69328]: DEBUG oslo_vmware.api [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274452, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.163203] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8fc4ae24-9077-49f6-8198-db6a714699be tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lock "7ee57873-8f9a-4bc6-9b88-261cd6239774" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.232s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1404.505541] env[69328]: DEBUG oslo_vmware.api [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274452, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.16287} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.505862] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1404.506066] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1404.506250] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1404.506425] env[69328]: INFO nova.compute.manager [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Took 2.23 seconds to destroy the instance on the hypervisor. [ 1404.506663] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1404.506852] env[69328]: DEBUG nova.compute.manager [-] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1404.506945] env[69328]: DEBUG nova.network.neutron [-] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1404.915572] env[69328]: DEBUG nova.compute.manager [req-b57fb8be-685b-4cf4-a1e8-1fab3d45d28f req-c89978fa-adb5-4f68-aa74-ac970f2924ba service nova] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Received event network-vif-deleted-096b3ef6-28d7-4463-a0b2-884dd086287d {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1404.915725] env[69328]: INFO nova.compute.manager [req-b57fb8be-685b-4cf4-a1e8-1fab3d45d28f req-c89978fa-adb5-4f68-aa74-ac970f2924ba service nova] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Neutron deleted interface 096b3ef6-28d7-4463-a0b2-884dd086287d; detaching it from the instance and deleting it from the info cache [ 1404.915903] env[69328]: DEBUG nova.network.neutron [req-b57fb8be-685b-4cf4-a1e8-1fab3d45d28f req-c89978fa-adb5-4f68-aa74-ac970f2924ba service nova] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1404.987108] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9d344437-5ea2-4b51-a093-606b8ee1f6f0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Acquiring lock "7ee57873-8f9a-4bc6-9b88-261cd6239774" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1404.987228] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9d344437-5ea2-4b51-a093-606b8ee1f6f0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lock "7ee57873-8f9a-4bc6-9b88-261cd6239774" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1405.394146] env[69328]: DEBUG nova.network.neutron [-] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1405.418614] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-44b92d8b-ee40-40ef-af15-f20274fb302c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.429097] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4281ef4-29a9-4c10-a0c7-e368cf9c2cb0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.454534] env[69328]: DEBUG nova.compute.manager [req-b57fb8be-685b-4cf4-a1e8-1fab3d45d28f req-c89978fa-adb5-4f68-aa74-ac970f2924ba service nova] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Detach interface failed, port_id=096b3ef6-28d7-4463-a0b2-884dd086287d, reason: 
Instance 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1405.490706] env[69328]: DEBUG nova.compute.utils [None req-9d344437-5ea2-4b51-a093-606b8ee1f6f0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1405.896908] env[69328]: INFO nova.compute.manager [-] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Took 1.39 seconds to deallocate network for instance. [ 1405.998455] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9d344437-5ea2-4b51-a093-606b8ee1f6f0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lock "7ee57873-8f9a-4bc6-9b88-261cd6239774" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.011s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1406.441311] env[69328]: INFO nova.compute.manager [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Took 0.54 seconds to detach 1 volumes for instance. [ 1406.949308] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1406.949668] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1406.949668] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1406.979274] env[69328]: INFO nova.scheduler.client.report [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Deleted allocations for instance 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb [ 1407.054783] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9d344437-5ea2-4b51-a093-606b8ee1f6f0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Acquiring lock "7ee57873-8f9a-4bc6-9b88-261cd6239774" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1407.054981] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9d344437-5ea2-4b51-a093-606b8ee1f6f0 tempest-AttachVolumeTestJSON-860956265 
tempest-AttachVolumeTestJSON-860956265-project-member] Lock "7ee57873-8f9a-4bc6-9b88-261cd6239774" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1407.055232] env[69328]: INFO nova.compute.manager [None req-9d344437-5ea2-4b51-a093-606b8ee1f6f0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Attaching volume 39b7488a-243c-4635-b3af-30956e6a514e to /dev/sdc [ 1407.092964] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc3b8fdb-83d1-4f4b-a46c-3252de74f254 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.101123] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4c273e5-7a28-46c1-b8fb-d4e8b73d4b90 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.115095] env[69328]: DEBUG nova.virt.block_device [None req-9d344437-5ea2-4b51-a093-606b8ee1f6f0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Updating existing volume attachment record: dd528b2e-35a2-4b10-ba0a-27e1966d536d {{(pid=69328) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1407.488541] env[69328]: DEBUG oslo_concurrency.lockutils [None req-d7827783-bb4e-488c-a664-6c4667592891 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "1a0e084a-f7b2-4f2e-b508-33caeed2ffeb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.724s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1408.909467] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "bb27c90e-af74-4bd4-9aa4-cc0d12e305e1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1408.909743] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "bb27c90e-af74-4bd4-9aa4-cc0d12e305e1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1409.412551] env[69328]: DEBUG nova.compute.manager [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Starting instance... 
{{(pid=69328) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1409.935404] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1409.935705] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1409.937163] env[69328]: INFO nova.compute.claims [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1410.981111] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22576eb9-503b-4909-b0b0-845fdd767c1f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.989014] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1b0f60a-9978-42fc-af0d-7110ab42189b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.017971] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e6ea9e2-2035-4626-8ff4-89215ad42ba5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.024673] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fad3fe7-84a8-4c15-9108-c376fc281bbd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.037346] env[69328]: DEBUG nova.compute.provider_tree [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1411.540591] env[69328]: DEBUG nova.scheduler.client.report [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1411.659337] env[69328]: DEBUG nova.virt.vmwareapi.volumeops 
[None req-9d344437-5ea2-4b51-a093-606b8ee1f6f0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Volume attach. Driver type: vmdk {{(pid=69328) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1411.659578] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d344437-5ea2-4b51-a093-606b8ee1f6f0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653994', 'volume_id': '39b7488a-243c-4635-b3af-30956e6a514e', 'name': 'volume-39b7488a-243c-4635-b3af-30956e6a514e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7ee57873-8f9a-4bc6-9b88-261cd6239774', 'attached_at': '', 'detached_at': '', 'volume_id': '39b7488a-243c-4635-b3af-30956e6a514e', 'serial': '39b7488a-243c-4635-b3af-30956e6a514e'} {{(pid=69328) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1411.660451] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e13ab2be-d605-417b-b8f3-5ba5140d374f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.677554] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7c74f21-4275-4534-a2b4-839f396a05a8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.703694] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d344437-5ea2-4b51-a093-606b8ee1f6f0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Reconfiguring VM instance instance-0000007e to attach disk [datastore1] volume-39b7488a-243c-4635-b3af-30956e6a514e/volume-39b7488a-243c-4635-b3af-30956e6a514e.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1411.703976] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-522d0413-ba23-42de-aee8-1e2f357345dd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.723030] env[69328]: DEBUG oslo_vmware.api [None req-9d344437-5ea2-4b51-a093-606b8ee1f6f0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Waiting for the task: (returnval){ [ 1411.723030] env[69328]: value = "task-3274455" [ 1411.723030] env[69328]: _type = "Task" [ 1411.723030] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.730315] env[69328]: DEBUG oslo_vmware.api [None req-9d344437-5ea2-4b51-a093-606b8ee1f6f0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274455, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.045229] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.109s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1412.045796] env[69328]: DEBUG nova.compute.manager [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Start building networks asynchronously for instance. {{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1412.232280] env[69328]: DEBUG oslo_vmware.api [None req-9d344437-5ea2-4b51-a093-606b8ee1f6f0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274455, 'name': ReconfigVM_Task, 'duration_secs': 0.420424} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.233089] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d344437-5ea2-4b51-a093-606b8ee1f6f0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Reconfigured VM instance instance-0000007e to attach disk [datastore1] volume-39b7488a-243c-4635-b3af-30956e6a514e/volume-39b7488a-243c-4635-b3af-30956e6a514e.vmdk or device None with type thin {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1412.237179] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fd9cbe7d-c21b-412d-92d6-5b76aef7c103 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.251513] env[69328]: DEBUG oslo_vmware.api [None req-9d344437-5ea2-4b51-a093-606b8ee1f6f0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Waiting for the task: (returnval){ [ 1412.251513] env[69328]: value = "task-3274456" [ 1412.251513] env[69328]: _type = "Task" [ 1412.251513] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.258869] env[69328]: DEBUG oslo_vmware.api [None req-9d344437-5ea2-4b51-a093-606b8ee1f6f0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274456, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.551565] env[69328]: DEBUG nova.compute.utils [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Using /dev/sd instead of None {{(pid=69328) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1412.552966] env[69328]: DEBUG nova.compute.manager [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Allocating IP information in the background. 
{{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1412.553464] env[69328]: DEBUG nova.network.neutron [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] allocate_for_instance() {{(pid=69328) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1412.599680] env[69328]: DEBUG nova.policy [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a07713f537e84711bc559a085d1e05f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8bbb75992830459c85c818e850261c61', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69328) authorize /opt/stack/nova/nova/policy.py:192}} [ 1412.760642] env[69328]: DEBUG oslo_vmware.api [None req-9d344437-5ea2-4b51-a093-606b8ee1f6f0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274456, 'name': ReconfigVM_Task, 'duration_secs': 0.136841} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.760943] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d344437-5ea2-4b51-a093-606b8ee1f6f0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653994', 'volume_id': '39b7488a-243c-4635-b3af-30956e6a514e', 'name': 'volume-39b7488a-243c-4635-b3af-30956e6a514e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7ee57873-8f9a-4bc6-9b88-261cd6239774', 'attached_at': '', 'detached_at': '', 'volume_id': '39b7488a-243c-4635-b3af-30956e6a514e', 'serial': '39b7488a-243c-4635-b3af-30956e6a514e'} {{(pid=69328) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1412.879336] env[69328]: DEBUG nova.network.neutron [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Successfully created port: 91d36c96-504d-42f3-b010-7ec883685b31 {{(pid=69328) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1413.056687] env[69328]: DEBUG nova.compute.manager [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Start building block device mappings for instance. 
{{(pid=69328) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1413.798579] env[69328]: DEBUG nova.objects.instance [None req-9d344437-5ea2-4b51-a093-606b8ee1f6f0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lazy-loading 'flavor' on Instance uuid 7ee57873-8f9a-4bc6-9b88-261cd6239774 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1414.066739] env[69328]: DEBUG nova.compute.manager [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Start spawning the instance on the hypervisor. {{(pid=69328) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1414.093339] env[69328]: DEBUG nova.virt.hardware [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-03T17:33:39Z,direct_url=,disk_format='vmdk',id=a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='ecd184c6b78b4e4297fb93abb94aa37d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-03T17:33:40Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1414.093608] env[69328]: DEBUG nova.virt.hardware [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1414.093764] env[69328]: DEBUG nova.virt.hardware [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1414.093945] env[69328]: DEBUG nova.virt.hardware [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1414.094096] env[69328]: DEBUG nova.virt.hardware [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1414.094244] env[69328]: DEBUG nova.virt.hardware [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1414.094447] env[69328]: DEBUG nova.virt.hardware 
[None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1414.094601] env[69328]: DEBUG nova.virt.hardware [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1414.094791] env[69328]: DEBUG nova.virt.hardware [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1414.094910] env[69328]: DEBUG nova.virt.hardware [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1414.095387] env[69328]: DEBUG nova.virt.hardware [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1414.096261] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b60c20fe-fe69-4356-9d31-53899d8a8a3f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.105750] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c80f279-3fdf-4850-a6be-818b6b9211a6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.227278] env[69328]: DEBUG nova.compute.manager [req-dca1d782-2725-44dc-ac1c-7beb2390e1b4 req-58a7fa71-4257-4385-b6f7-ea505ac3e0c9 service nova] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Received event network-vif-plugged-91d36c96-504d-42f3-b010-7ec883685b31 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1414.227531] env[69328]: DEBUG oslo_concurrency.lockutils [req-dca1d782-2725-44dc-ac1c-7beb2390e1b4 req-58a7fa71-4257-4385-b6f7-ea505ac3e0c9 service nova] Acquiring lock "bb27c90e-af74-4bd4-9aa4-cc0d12e305e1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1414.227690] env[69328]: DEBUG oslo_concurrency.lockutils [req-dca1d782-2725-44dc-ac1c-7beb2390e1b4 req-58a7fa71-4257-4385-b6f7-ea505ac3e0c9 service nova] Lock "bb27c90e-af74-4bd4-9aa4-cc0d12e305e1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1414.227862] env[69328]: DEBUG oslo_concurrency.lockutils [req-dca1d782-2725-44dc-ac1c-7beb2390e1b4 
req-58a7fa71-4257-4385-b6f7-ea505ac3e0c9 service nova] Lock "bb27c90e-af74-4bd4-9aa4-cc0d12e305e1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1414.228035] env[69328]: DEBUG nova.compute.manager [req-dca1d782-2725-44dc-ac1c-7beb2390e1b4 req-58a7fa71-4257-4385-b6f7-ea505ac3e0c9 service nova] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] No waiting events found dispatching network-vif-plugged-91d36c96-504d-42f3-b010-7ec883685b31 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1414.228185] env[69328]: WARNING nova.compute.manager [req-dca1d782-2725-44dc-ac1c-7beb2390e1b4 req-58a7fa71-4257-4385-b6f7-ea505ac3e0c9 service nova] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Received unexpected event network-vif-plugged-91d36c96-504d-42f3-b010-7ec883685b31 for instance with vm_state building and task_state spawning. [ 1414.303437] env[69328]: DEBUG oslo_concurrency.lockutils [None req-9d344437-5ea2-4b51-a093-606b8ee1f6f0 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lock "7ee57873-8f9a-4bc6-9b88-261cd6239774" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.248s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1414.315772] env[69328]: DEBUG nova.network.neutron [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Successfully updated port: 91d36c96-504d-42f3-b010-7ec883685b31 {{(pid=69328) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1414.582094] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ca11b2cd-c342-4640-96e5-e3e94686ba15 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Acquiring lock "7ee57873-8f9a-4bc6-9b88-261cd6239774" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1414.582378] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ca11b2cd-c342-4640-96e5-e3e94686ba15 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lock "7ee57873-8f9a-4bc6-9b88-261cd6239774" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1414.818607] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "refresh_cache-bb27c90e-af74-4bd4-9aa4-cc0d12e305e1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1414.818762] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquired lock "refresh_cache-bb27c90e-af74-4bd4-9aa4-cc0d12e305e1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1414.818875] env[69328]: DEBUG 
nova.network.neutron [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1415.085845] env[69328]: INFO nova.compute.manager [None req-ca11b2cd-c342-4640-96e5-e3e94686ba15 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Detaching volume 2a193f12-5510-489f-b53b-c8a4f6a0a028 [ 1415.118987] env[69328]: INFO nova.virt.block_device [None req-ca11b2cd-c342-4640-96e5-e3e94686ba15 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Attempting to driver detach volume 2a193f12-5510-489f-b53b-c8a4f6a0a028 from mountpoint /dev/sdb [ 1415.119241] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca11b2cd-c342-4640-96e5-e3e94686ba15 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Volume detach. Driver type: vmdk {{(pid=69328) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1415.119477] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca11b2cd-c342-4640-96e5-e3e94686ba15 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653993', 'volume_id': '2a193f12-5510-489f-b53b-c8a4f6a0a028', 'name': 'volume-2a193f12-5510-489f-b53b-c8a4f6a0a028', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7ee57873-8f9a-4bc6-9b88-261cd6239774', 'attached_at': '', 'detached_at': '', 'volume_id': '2a193f12-5510-489f-b53b-c8a4f6a0a028', 'serial': '2a193f12-5510-489f-b53b-c8a4f6a0a028'} {{(pid=69328) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1415.120361] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb9e0b4a-a442-4fdf-b203-28a941c7c743 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.144856] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77e0e74b-82d1-41ac-b651-cf53c9865b42 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.151343] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfbb66ad-7b17-4859-8eb4-162fbb069d10 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.174212] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2061118-2c68-492e-8a80-651cae1fdaa1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.188134] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca11b2cd-c342-4640-96e5-e3e94686ba15 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] The volume has not been 
displaced from its original location: [datastore1] volume-2a193f12-5510-489f-b53b-c8a4f6a0a028/volume-2a193f12-5510-489f-b53b-c8a4f6a0a028.vmdk. No consolidation needed. {{(pid=69328) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1415.193262] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca11b2cd-c342-4640-96e5-e3e94686ba15 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Reconfiguring VM instance instance-0000007e to detach disk 2001 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1415.193524] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-738f017c-b6db-425d-b7bb-073c74f51955 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.210965] env[69328]: DEBUG oslo_vmware.api [None req-ca11b2cd-c342-4640-96e5-e3e94686ba15 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Waiting for the task: (returnval){ [ 1415.210965] env[69328]: value = "task-3274457" [ 1415.210965] env[69328]: _type = "Task" [ 1415.210965] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.218077] env[69328]: DEBUG oslo_vmware.api [None req-ca11b2cd-c342-4640-96e5-e3e94686ba15 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274457, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.350294] env[69328]: DEBUG nova.network.neutron [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Instance cache missing network info. 
{{(pid=69328) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1415.470974] env[69328]: DEBUG nova.network.neutron [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Updating instance_info_cache with network_info: [{"id": "91d36c96-504d-42f3-b010-7ec883685b31", "address": "fa:16:3e:94:8e:92", "network": {"id": "238bfce5-9117-4d8e-8dd7-445d8d894599", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-231958017-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8bbb75992830459c85c818e850261c61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3d7e184-c87f-47a5-8d0d-9fa20e07e669", "external-id": "nsx-vlan-transportzone-746", "segmentation_id": 746, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91d36c96-50", "ovs_interfaceid": "91d36c96-504d-42f3-b010-7ec883685b31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1415.720904] env[69328]: DEBUG oslo_vmware.api [None req-ca11b2cd-c342-4640-96e5-e3e94686ba15 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274457, 'name': ReconfigVM_Task, 'duration_secs': 0.232592} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.721138] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca11b2cd-c342-4640-96e5-e3e94686ba15 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Reconfigured VM instance instance-0000007e to detach disk 2001 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1415.725685] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-52f33c91-119e-46f2-98d7-85eafcce712a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.740704] env[69328]: DEBUG oslo_vmware.api [None req-ca11b2cd-c342-4640-96e5-e3e94686ba15 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Waiting for the task: (returnval){ [ 1415.740704] env[69328]: value = "task-3274458" [ 1415.740704] env[69328]: _type = "Task" [ 1415.740704] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.748150] env[69328]: DEBUG oslo_vmware.api [None req-ca11b2cd-c342-4640-96e5-e3e94686ba15 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274458, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.974227] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Releasing lock "refresh_cache-bb27c90e-af74-4bd4-9aa4-cc0d12e305e1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1415.974521] env[69328]: DEBUG nova.compute.manager [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Instance network_info: |[{"id": "91d36c96-504d-42f3-b010-7ec883685b31", "address": "fa:16:3e:94:8e:92", "network": {"id": "238bfce5-9117-4d8e-8dd7-445d8d894599", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-231958017-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8bbb75992830459c85c818e850261c61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3d7e184-c87f-47a5-8d0d-9fa20e07e669", "external-id": "nsx-vlan-transportzone-746", "segmentation_id": 746, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91d36c96-50", "ovs_interfaceid": "91d36c96-504d-42f3-b010-7ec883685b31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69328) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1415.974953] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:94:8e:92', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f3d7e184-c87f-47a5-8d0d-9fa20e07e669', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '91d36c96-504d-42f3-b010-7ec883685b31', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1415.982593] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1415.982796] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1415.983028] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4ad0d822-db69-4be0-b2ee-2dddaaebfa15 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.002741] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1416.002741] env[69328]: value = "task-3274459" [ 1416.002741] env[69328]: _type = "Task" [ 1416.002741] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.009852] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274459, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.250687] env[69328]: DEBUG oslo_vmware.api [None req-ca11b2cd-c342-4640-96e5-e3e94686ba15 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274458, 'name': ReconfigVM_Task, 'duration_secs': 0.129729} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.250993] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca11b2cd-c342-4640-96e5-e3e94686ba15 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653993', 'volume_id': '2a193f12-5510-489f-b53b-c8a4f6a0a028', 'name': 'volume-2a193f12-5510-489f-b53b-c8a4f6a0a028', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7ee57873-8f9a-4bc6-9b88-261cd6239774', 'attached_at': '', 'detached_at': '', 'volume_id': '2a193f12-5510-489f-b53b-c8a4f6a0a028', 'serial': '2a193f12-5510-489f-b53b-c8a4f6a0a028'} {{(pid=69328) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1416.255909] env[69328]: DEBUG nova.compute.manager [req-d0ba2381-b5f7-4654-bd54-22ca93f4284d req-6e0ac028-c879-4ee5-b450-5b5fc0d796ef service nova] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Received event network-changed-91d36c96-504d-42f3-b010-7ec883685b31 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1416.256107] env[69328]: DEBUG nova.compute.manager [req-d0ba2381-b5f7-4654-bd54-22ca93f4284d req-6e0ac028-c879-4ee5-b450-5b5fc0d796ef service nova] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Refreshing instance network info cache due to event network-changed-91d36c96-504d-42f3-b010-7ec883685b31. 
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1416.256326] env[69328]: DEBUG oslo_concurrency.lockutils [req-d0ba2381-b5f7-4654-bd54-22ca93f4284d req-6e0ac028-c879-4ee5-b450-5b5fc0d796ef service nova] Acquiring lock "refresh_cache-bb27c90e-af74-4bd4-9aa4-cc0d12e305e1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1416.256557] env[69328]: DEBUG oslo_concurrency.lockutils [req-d0ba2381-b5f7-4654-bd54-22ca93f4284d req-6e0ac028-c879-4ee5-b450-5b5fc0d796ef service nova] Acquired lock "refresh_cache-bb27c90e-af74-4bd4-9aa4-cc0d12e305e1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1416.256687] env[69328]: DEBUG nova.network.neutron [req-d0ba2381-b5f7-4654-bd54-22ca93f4284d req-6e0ac028-c879-4ee5-b450-5b5fc0d796ef service nova] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Refreshing network info cache for port 91d36c96-504d-42f3-b010-7ec883685b31 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1416.512581] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274459, 'name': CreateVM_Task, 'duration_secs': 0.31871} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.514036] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1416.514036] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1416.514036] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1416.514215] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1416.514419] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20f5647e-ba44-4092-ad43-66c8febab577 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.518977] env[69328]: DEBUG oslo_vmware.api [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1416.518977] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5278b7d7-ab0c-95fe-a50e-5e6ae2505d96" [ 1416.518977] env[69328]: _type = "Task" [ 1416.518977] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.527972] env[69328]: DEBUG oslo_vmware.api [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5278b7d7-ab0c-95fe-a50e-5e6ae2505d96, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.791833] env[69328]: DEBUG nova.objects.instance [None req-ca11b2cd-c342-4640-96e5-e3e94686ba15 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lazy-loading 'flavor' on Instance uuid 7ee57873-8f9a-4bc6-9b88-261cd6239774 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1416.972987] env[69328]: DEBUG nova.network.neutron [req-d0ba2381-b5f7-4654-bd54-22ca93f4284d req-6e0ac028-c879-4ee5-b450-5b5fc0d796ef service nova] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Updated VIF entry in instance network info cache for port 91d36c96-504d-42f3-b010-7ec883685b31. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1416.973373] env[69328]: DEBUG nova.network.neutron [req-d0ba2381-b5f7-4654-bd54-22ca93f4284d req-6e0ac028-c879-4ee5-b450-5b5fc0d796ef service nova] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Updating instance_info_cache with network_info: [{"id": "91d36c96-504d-42f3-b010-7ec883685b31", "address": "fa:16:3e:94:8e:92", "network": {"id": "238bfce5-9117-4d8e-8dd7-445d8d894599", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-231958017-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8bbb75992830459c85c818e850261c61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3d7e184-c87f-47a5-8d0d-9fa20e07e669", "external-id": "nsx-vlan-transportzone-746", "segmentation_id": 746, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91d36c96-50", "ovs_interfaceid": "91d36c96-504d-42f3-b010-7ec883685b31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1417.029522] env[69328]: DEBUG oslo_vmware.api [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5278b7d7-ab0c-95fe-a50e-5e6ae2505d96, 'name': SearchDatastore_Task, 'duration_secs': 0.013199} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.029811] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1417.030053] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Processing image a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1417.030337] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1417.030511] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1417.030696] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1417.030935] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-35ca73cb-5def-4f93-986c-beb382c01dc1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.039877] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1417.040030] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1417.040702] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6dc32805-2dcf-4037-892e-7dff1a32d4f5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.045763] env[69328]: DEBUG oslo_vmware.api [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1417.045763] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]527e5dc2-3123-68b8-e131-e7dffa360dea" [ 1417.045763] env[69328]: _type = "Task" [ 1417.045763] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.052948] env[69328]: DEBUG oslo_vmware.api [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]527e5dc2-3123-68b8-e131-e7dffa360dea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.476669] env[69328]: DEBUG oslo_concurrency.lockutils [req-d0ba2381-b5f7-4654-bd54-22ca93f4284d req-6e0ac028-c879-4ee5-b450-5b5fc0d796ef service nova] Releasing lock "refresh_cache-bb27c90e-af74-4bd4-9aa4-cc0d12e305e1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1417.556065] env[69328]: DEBUG oslo_vmware.api [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]527e5dc2-3123-68b8-e131-e7dffa360dea, 'name': SearchDatastore_Task, 'duration_secs': 0.012734} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.556949] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-deb1373d-ab24-4735-a984-20181a8abfd4 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.562552] env[69328]: DEBUG oslo_vmware.api [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1417.562552] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52155604-67f0-b58f-3fe5-0764ed2c2e07" [ 1417.562552] env[69328]: _type = "Task" [ 1417.562552] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.569964] env[69328]: DEBUG oslo_vmware.api [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52155604-67f0-b58f-3fe5-0764ed2c2e07, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.799760] env[69328]: DEBUG oslo_concurrency.lockutils [None req-ca11b2cd-c342-4640-96e5-e3e94686ba15 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lock "7ee57873-8f9a-4bc6-9b88-261cd6239774" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.217s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1417.827974] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8b097aad-7855-4919-8570-34b556f2f213 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Acquiring lock "7ee57873-8f9a-4bc6-9b88-261cd6239774" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1417.828192] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8b097aad-7855-4919-8570-34b556f2f213 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lock "7ee57873-8f9a-4bc6-9b88-261cd6239774" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1418.073369] env[69328]: DEBUG oslo_vmware.api [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52155604-67f0-b58f-3fe5-0764ed2c2e07, 'name': SearchDatastore_Task, 'duration_secs': 0.010857} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.073683] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1418.073944] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] bb27c90e-af74-4bd4-9aa4-cc0d12e305e1/bb27c90e-af74-4bd4-9aa4-cc0d12e305e1.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1418.074212] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-db612714-730f-480c-920e-a40280a91036 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.081662] env[69328]: DEBUG oslo_vmware.api [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1418.081662] env[69328]: value = "task-3274460" [ 1418.081662] env[69328]: _type = "Task" [ 1418.081662] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.089342] env[69328]: DEBUG oslo_vmware.api [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274460, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.331659] env[69328]: INFO nova.compute.manager [None req-8b097aad-7855-4919-8570-34b556f2f213 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Detaching volume 39b7488a-243c-4635-b3af-30956e6a514e [ 1418.369432] env[69328]: INFO nova.virt.block_device [None req-8b097aad-7855-4919-8570-34b556f2f213 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Attempting to driver detach volume 39b7488a-243c-4635-b3af-30956e6a514e from mountpoint /dev/sdc [ 1418.369772] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b097aad-7855-4919-8570-34b556f2f213 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Volume detach. Driver type: vmdk {{(pid=69328) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1418.369983] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b097aad-7855-4919-8570-34b556f2f213 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653994', 'volume_id': '39b7488a-243c-4635-b3af-30956e6a514e', 'name': 'volume-39b7488a-243c-4635-b3af-30956e6a514e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7ee57873-8f9a-4bc6-9b88-261cd6239774', 'attached_at': '', 'detached_at': '', 'volume_id': '39b7488a-243c-4635-b3af-30956e6a514e', 'serial': '39b7488a-243c-4635-b3af-30956e6a514e'} {{(pid=69328) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1418.371052] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ca678fb-6f61-46bc-84d0-d81eada43f00 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.397266] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76cac76d-d470-4cac-8392-a6155df3cb4a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.405300] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db53bdd9-2a69-49a5-8c47-d8051f49ae16 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.427901] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7936c956-eeaf-447d-a705-95b213711e6d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.444434] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None 
req-8b097aad-7855-4919-8570-34b556f2f213 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] The volume has not been displaced from its original location: [datastore1] volume-39b7488a-243c-4635-b3af-30956e6a514e/volume-39b7488a-243c-4635-b3af-30956e6a514e.vmdk. No consolidation needed. {{(pid=69328) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1418.450312] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b097aad-7855-4919-8570-34b556f2f213 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Reconfiguring VM instance instance-0000007e to detach disk 2002 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1418.450730] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e471f6a0-40d9-4bdf-ae66-48ac3370975c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.470586] env[69328]: DEBUG oslo_vmware.api [None req-8b097aad-7855-4919-8570-34b556f2f213 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Waiting for the task: (returnval){ [ 1418.470586] env[69328]: value = "task-3274461" [ 1418.470586] env[69328]: _type = "Task" [ 1418.470586] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.479802] env[69328]: DEBUG oslo_vmware.api [None req-8b097aad-7855-4919-8570-34b556f2f213 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274461, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.591185] env[69328]: DEBUG oslo_vmware.api [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274460, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.440589} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.591378] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318/a6ec8ae9-c6b4-407a-8d93-a6cecc0eb318.vmdk to [datastore1] bb27c90e-af74-4bd4-9aa4-cc0d12e305e1/bb27c90e-af74-4bd4-9aa4-cc0d12e305e1.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1418.591582] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Extending root virtual disk to 1048576 {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1418.591823] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8092972b-9127-40a0-972b-51fbd7fe49b5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.598417] env[69328]: DEBUG oslo_vmware.api [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1418.598417] env[69328]: value = "task-3274462" [ 1418.598417] env[69328]: _type = "Task" [ 1418.598417] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.605453] env[69328]: DEBUG oslo_vmware.api [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274462, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.980608] env[69328]: DEBUG oslo_vmware.api [None req-8b097aad-7855-4919-8570-34b556f2f213 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274461, 'name': ReconfigVM_Task, 'duration_secs': 0.240435} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.980876] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b097aad-7855-4919-8570-34b556f2f213 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Reconfigured VM instance instance-0000007e to detach disk 2002 {{(pid=69328) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1418.985328] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-711c27c4-59a7-4ff9-a3c3-1f3fd0abf944 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.998908] env[69328]: DEBUG oslo_vmware.api [None req-8b097aad-7855-4919-8570-34b556f2f213 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Waiting for the task: (returnval){ [ 1418.998908] env[69328]: value = "task-3274463" [ 1418.998908] env[69328]: _type = "Task" [ 1418.998908] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.006646] env[69328]: DEBUG oslo_vmware.api [None req-8b097aad-7855-4919-8570-34b556f2f213 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274463, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.107912] env[69328]: DEBUG oslo_vmware.api [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274462, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069098} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.108284] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Extended root virtual disk {{(pid=69328) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1419.109336] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b272d84d-1212-43db-bd43-7212a8ae8f9d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.131780] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Reconfiguring VM instance instance-0000007f to attach disk [datastore1] bb27c90e-af74-4bd4-9aa4-cc0d12e305e1/bb27c90e-af74-4bd4-9aa4-cc0d12e305e1.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1419.132012] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e9701fa8-6922-44b4-beae-b4a1506ce68f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.151031] env[69328]: DEBUG oslo_vmware.api [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1419.151031] env[69328]: value = "task-3274464" [ 1419.151031] env[69328]: _type = "Task" [ 1419.151031] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.810877] env[69328]: DEBUG oslo_vmware.api [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274464, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.811269] env[69328]: WARNING oslo_vmware.common.loopingcall [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] task run outlasted interval by 0.15980899999999998 sec [ 1419.821931] env[69328]: DEBUG oslo_vmware.api [None req-8b097aad-7855-4919-8570-34b556f2f213 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274463, 'name': ReconfigVM_Task, 'duration_secs': 0.138392} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.824742] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b097aad-7855-4919-8570-34b556f2f213 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-653994', 'volume_id': '39b7488a-243c-4635-b3af-30956e6a514e', 'name': 'volume-39b7488a-243c-4635-b3af-30956e6a514e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7ee57873-8f9a-4bc6-9b88-261cd6239774', 'attached_at': '', 'detached_at': '', 'volume_id': '39b7488a-243c-4635-b3af-30956e6a514e', 'serial': '39b7488a-243c-4635-b3af-30956e6a514e'} {{(pid=69328) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1419.826721] env[69328]: DEBUG oslo_vmware.api [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274464, 'name': ReconfigVM_Task, 'duration_secs': 0.303224} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.827140] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Reconfigured VM instance instance-0000007f to attach disk [datastore1] bb27c90e-af74-4bd4-9aa4-cc0d12e305e1/bb27c90e-af74-4bd4-9aa4-cc0d12e305e1.vmdk or device None with type sparse {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1419.827730] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-80af8e7e-65e2-419c-b7bd-22a95ab54a74 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.834050] env[69328]: DEBUG oslo_vmware.api [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1419.834050] env[69328]: value = "task-3274465" [ 1419.834050] env[69328]: _type = "Task" [ 1419.834050] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.842262] env[69328]: DEBUG oslo_vmware.api [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274465, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.344243] env[69328]: DEBUG oslo_vmware.api [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274465, 'name': Rename_Task, 'duration_secs': 0.148119} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.344521] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1420.344760] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9ca9e136-7a27-4f5c-a9f2-ed40327e29e9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.351351] env[69328]: DEBUG oslo_vmware.api [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1420.351351] env[69328]: value = "task-3274466" [ 1420.351351] env[69328]: _type = "Task" [ 1420.351351] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.358794] env[69328]: DEBUG oslo_vmware.api [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274466, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.366537] env[69328]: DEBUG nova.objects.instance [None req-8b097aad-7855-4919-8570-34b556f2f213 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lazy-loading 'flavor' on Instance uuid 7ee57873-8f9a-4bc6-9b88-261cd6239774 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1420.860972] env[69328]: DEBUG oslo_vmware.api [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274466, 'name': PowerOnVM_Task, 'duration_secs': 0.47434} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.862297] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1420.862297] env[69328]: INFO nova.compute.manager [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Took 6.79 seconds to spawn the instance on the hypervisor. [ 1420.862297] env[69328]: DEBUG nova.compute.manager [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1420.862837] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d9ef33e-5ae1-479f-ad3e-5e37efc9b0cd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.377369] env[69328]: DEBUG oslo_concurrency.lockutils [None req-8b097aad-7855-4919-8570-34b556f2f213 tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lock "7ee57873-8f9a-4bc6-9b88-261cd6239774" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.549s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1421.381834] env[69328]: INFO nova.compute.manager [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Took 11.46 seconds to build instance. [ 1421.882020] env[69328]: DEBUG oslo_concurrency.lockutils [None req-6298ba56-2874-42e2-a354-174c594c6e8a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "bb27c90e-af74-4bd4-9aa4-cc0d12e305e1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.972s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1421.954017] env[69328]: DEBUG nova.compute.manager [req-df3276a4-08c4-42f3-8d6f-809a5b3d772d req-d648d8d0-2049-4a31-89e3-a865f75b3827 service nova] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Received event network-changed-91d36c96-504d-42f3-b010-7ec883685b31 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1421.954227] env[69328]: DEBUG nova.compute.manager [req-df3276a4-08c4-42f3-8d6f-809a5b3d772d req-d648d8d0-2049-4a31-89e3-a865f75b3827 service nova] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Refreshing instance network info cache due to event network-changed-91d36c96-504d-42f3-b010-7ec883685b31. 
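The 'Acquiring lock ... acquired ... "released" :: held N.NNNs' triples above come from oslo.concurrency's lockutils, which serializes callers that share a lock name. A minimal sketch of the pattern; using the bare instance UUID as the lock name here is only an illustration, not a claim about Nova's exact lock naming:

    from oslo_concurrency import lockutils

    # Callers decorated with the same lock name run one at a time; the
    # wrapper logs acquire/release lines like those seen above.
    @lockutils.synchronized('7ee57873-8f9a-4bc6-9b88-261cd6239774')
    def do_detach_volume():
        pass  # detach work runs while the lock is held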
{{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1421.954440] env[69328]: DEBUG oslo_concurrency.lockutils [req-df3276a4-08c4-42f3-8d6f-809a5b3d772d req-d648d8d0-2049-4a31-89e3-a865f75b3827 service nova] Acquiring lock "refresh_cache-bb27c90e-af74-4bd4-9aa4-cc0d12e305e1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1421.954584] env[69328]: DEBUG oslo_concurrency.lockutils [req-df3276a4-08c4-42f3-8d6f-809a5b3d772d req-d648d8d0-2049-4a31-89e3-a865f75b3827 service nova] Acquired lock "refresh_cache-bb27c90e-af74-4bd4-9aa4-cc0d12e305e1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1421.954743] env[69328]: DEBUG nova.network.neutron [req-df3276a4-08c4-42f3-8d6f-809a5b3d772d req-d648d8d0-2049-4a31-89e3-a865f75b3827 service nova] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Refreshing network info cache for port 91d36c96-504d-42f3-b010-7ec883685b31 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1422.237458] env[69328]: DEBUG oslo_concurrency.lockutils [None req-749b2677-5479-4039-89aa-158062fb3ffd tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Acquiring lock "7ee57873-8f9a-4bc6-9b88-261cd6239774" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1422.237745] env[69328]: DEBUG oslo_concurrency.lockutils [None req-749b2677-5479-4039-89aa-158062fb3ffd tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lock "7ee57873-8f9a-4bc6-9b88-261cd6239774" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1422.237957] env[69328]: DEBUG oslo_concurrency.lockutils [None req-749b2677-5479-4039-89aa-158062fb3ffd tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Acquiring lock "7ee57873-8f9a-4bc6-9b88-261cd6239774-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1422.238157] env[69328]: DEBUG oslo_concurrency.lockutils [None req-749b2677-5479-4039-89aa-158062fb3ffd tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lock "7ee57873-8f9a-4bc6-9b88-261cd6239774-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1422.238339] env[69328]: DEBUG oslo_concurrency.lockutils [None req-749b2677-5479-4039-89aa-158062fb3ffd tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lock "7ee57873-8f9a-4bc6-9b88-261cd6239774-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1422.240475] env[69328]: INFO nova.compute.manager [None req-749b2677-5479-4039-89aa-158062fb3ffd tempest-AttachVolumeTestJSON-860956265 
tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Terminating instance [ 1422.653327] env[69328]: DEBUG nova.network.neutron [req-df3276a4-08c4-42f3-8d6f-809a5b3d772d req-d648d8d0-2049-4a31-89e3-a865f75b3827 service nova] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Updated VIF entry in instance network info cache for port 91d36c96-504d-42f3-b010-7ec883685b31. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1422.653695] env[69328]: DEBUG nova.network.neutron [req-df3276a4-08c4-42f3-8d6f-809a5b3d772d req-d648d8d0-2049-4a31-89e3-a865f75b3827 service nova] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Updating instance_info_cache with network_info: [{"id": "91d36c96-504d-42f3-b010-7ec883685b31", "address": "fa:16:3e:94:8e:92", "network": {"id": "238bfce5-9117-4d8e-8dd7-445d8d894599", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-231958017-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8bbb75992830459c85c818e850261c61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3d7e184-c87f-47a5-8d0d-9fa20e07e669", "external-id": "nsx-vlan-transportzone-746", "segmentation_id": 746, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91d36c96-50", "ovs_interfaceid": "91d36c96-504d-42f3-b010-7ec883685b31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1422.744317] env[69328]: DEBUG nova.compute.manager [None req-749b2677-5479-4039-89aa-158062fb3ffd tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Start destroying the instance on the hypervisor. 
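The instance_info_cache update above stores one dict per VIF, and the addresses can be read straight out of that structure. A small sketch with the entry abridged to the fields used (values copied from the cache entry logged above):

    # One VIF entry shaped like the cache update above (abridged).
    vif = {
        "id": "91d36c96-504d-42f3-b010-7ec883685b31",
        "address": "fa:16:3e:94:8e:92",
        "network": {
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{
                    "address": "192.168.128.7",
                    "floating_ips": [{"address": "10.180.180.149"}],
                }],
            }],
        },
    }

    fixed = [ip["address"]
             for subnet in vif["network"]["subnets"]
             for ip in subnet["ips"]]
    floating = [fip["address"]
                for subnet in vif["network"]["subnets"]
                for ip in subnet["ips"]
                for fip in ip.get("floating_ips", [])]
    # fixed == ['192.168.128.7'], floating == ['10.180.180.149']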
{{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1422.744549] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-749b2677-5479-4039-89aa-158062fb3ffd tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1422.745476] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99dae4f7-352e-4fe1-9781-3c553b70b0c9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.753454] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-749b2677-5479-4039-89aa-158062fb3ffd tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1422.753696] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f7fa87f6-e33c-4f81-b7e0-49ba856d7560 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.761136] env[69328]: DEBUG oslo_vmware.api [None req-749b2677-5479-4039-89aa-158062fb3ffd tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Waiting for the task: (returnval){ [ 1422.761136] env[69328]: value = "task-3274467" [ 1422.761136] env[69328]: _type = "Task" [ 1422.761136] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.768853] env[69328]: DEBUG oslo_vmware.api [None req-749b2677-5479-4039-89aa-158062fb3ffd tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274467, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.156508] env[69328]: DEBUG oslo_concurrency.lockutils [req-df3276a4-08c4-42f3-8d6f-809a5b3d772d req-d648d8d0-2049-4a31-89e3-a865f75b3827 service nova] Releasing lock "refresh_cache-bb27c90e-af74-4bd4-9aa4-cc0d12e305e1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1423.270281] env[69328]: DEBUG oslo_vmware.api [None req-749b2677-5479-4039-89aa-158062fb3ffd tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274467, 'name': PowerOffVM_Task, 'duration_secs': 0.158292} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.270489] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-749b2677-5479-4039-89aa-158062fb3ffd tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1423.270656] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-749b2677-5479-4039-89aa-158062fb3ffd tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1423.270893] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bd76e9fe-d025-49c6-a89e-c7c25e3dedce {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.333773] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-749b2677-5479-4039-89aa-158062fb3ffd tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1423.334015] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-749b2677-5479-4039-89aa-158062fb3ffd tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Deleting contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1423.334207] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-749b2677-5479-4039-89aa-158062fb3ffd tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Deleting the datastore file [datastore2] 7ee57873-8f9a-4bc6-9b88-261cd6239774 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1423.334471] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f45f56c6-fc90-432d-9585-3724315c3e94 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.341125] env[69328]: DEBUG oslo_vmware.api [None req-749b2677-5479-4039-89aa-158062fb3ffd tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Waiting for the task: (returnval){ [ 1423.341125] env[69328]: value = "task-3274469" [ 1423.341125] env[69328]: _type = "Task" [ 1423.341125] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.349531] env[69328]: DEBUG oslo_vmware.api [None req-749b2677-5479-4039-89aa-158062fb3ffd tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274469, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.851049] env[69328]: DEBUG oslo_vmware.api [None req-749b2677-5479-4039-89aa-158062fb3ffd tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Task: {'id': task-3274469, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149602} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.851280] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-749b2677-5479-4039-89aa-158062fb3ffd tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1423.851465] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-749b2677-5479-4039-89aa-158062fb3ffd tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Deleted contents of the VM from datastore datastore2 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1423.851641] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-749b2677-5479-4039-89aa-158062fb3ffd tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1423.851820] env[69328]: INFO nova.compute.manager [None req-749b2677-5479-4039-89aa-158062fb3ffd tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1423.852067] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-749b2677-5479-4039-89aa-158062fb3ffd tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1423.852264] env[69328]: DEBUG nova.compute.manager [-] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1423.852357] env[69328]: DEBUG nova.network.neutron [-] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1424.244339] env[69328]: DEBUG nova.compute.manager [req-a2017351-fb28-4899-8d34-d79384ca73f4 req-19d5091e-46a3-42ae-8987-871bfc226c03 service nova] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Received event network-vif-deleted-e3c363b2-cdba-41b8-b6f5-150b083f2ec8 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1424.244587] env[69328]: INFO nova.compute.manager [req-a2017351-fb28-4899-8d34-d79384ca73f4 req-19d5091e-46a3-42ae-8987-871bfc226c03 service nova] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Neutron deleted interface e3c363b2-cdba-41b8-b6f5-150b083f2ec8; detaching it from the instance and deleting it from the info cache [ 1424.244705] env[69328]: DEBUG nova.network.neutron [req-a2017351-fb28-4899-8d34-d79384ca73f4 req-19d5091e-46a3-42ae-8987-871bfc226c03 service nova] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1424.726588] env[69328]: DEBUG nova.network.neutron [-] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1424.747053] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d5d361c8-52cd-439f-b5e0-a8683276f5f3 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.756743] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ce529b1-024a-4449-ac9d-db60872a433e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.781958] env[69328]: DEBUG nova.compute.manager [req-a2017351-fb28-4899-8d34-d79384ca73f4 req-19d5091e-46a3-42ae-8987-871bfc226c03 service nova] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Detach interface failed, port_id=e3c363b2-cdba-41b8-b6f5-150b083f2ec8, reason: Instance 7ee57873-8f9a-4bc6-9b88-261cd6239774 could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1425.228764] env[69328]: INFO nova.compute.manager [-] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Took 1.38 seconds to deallocate network for instance. 
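The terminate path traced above runs a fixed sequence: power off, unregister from vCenter, delete the instance directory on the datastore, then deallocate the Neutron ports. A compressed sketch of that ordering with placeholder callables, not Nova's actual vmops/ds_util methods:

    def destroy_instance(power_off, unregister, delete_datastore_files,
                         deallocate_network):
        # Ordering as traced above for instance 7ee57873-...:
        power_off()                # PowerOffVM_Task
        unregister()               # VirtualMachine.UnregisterVM
        delete_datastore_files()   # FileManager.DeleteDatastoreFile_Task
        deallocate_network()       # deallocate_for_instance()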
[ 1425.735139] env[69328]: DEBUG oslo_concurrency.lockutils [None req-749b2677-5479-4039-89aa-158062fb3ffd tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1425.735574] env[69328]: DEBUG oslo_concurrency.lockutils [None req-749b2677-5479-4039-89aa-158062fb3ffd tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1425.735634] env[69328]: DEBUG nova.objects.instance [None req-749b2677-5479-4039-89aa-158062fb3ffd tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lazy-loading 'resources' on Instance uuid 7ee57873-8f9a-4bc6-9b88-261cd6239774 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1426.278228] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d76f711b-7a73-46ca-b4d8-831963912745 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.285738] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8ee97c9-79cf-4843-892d-c9939858688a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.314505] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-327523a4-9d32-40aa-920b-992d61534162 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.321673] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f59ff24-cdcf-4323-8f81-fa1f9b6cb90f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.334105] env[69328]: DEBUG nova.compute.provider_tree [None req-749b2677-5479-4039-89aa-158062fb3ffd tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1426.837837] env[69328]: DEBUG nova.scheduler.client.report [None req-749b2677-5479-4039-89aa-158062fb3ffd tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1427.342852] env[69328]: DEBUG oslo_concurrency.lockutils [None req-749b2677-5479-4039-89aa-158062fb3ffd tempest-AttachVolumeTestJSON-860956265 
tempest-AttachVolumeTestJSON-860956265-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.607s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1427.362050] env[69328]: INFO nova.scheduler.client.report [None req-749b2677-5479-4039-89aa-158062fb3ffd tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Deleted allocations for instance 7ee57873-8f9a-4bc6-9b88-261cd6239774 [ 1427.870083] env[69328]: DEBUG oslo_concurrency.lockutils [None req-749b2677-5479-4039-89aa-158062fb3ffd tempest-AttachVolumeTestJSON-860956265 tempest-AttachVolumeTestJSON-860956265-project-member] Lock "7ee57873-8f9a-4bc6-9b88-261cd6239774" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.632s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1433.631936] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1433.632300] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Cleaning up deleted instances with incomplete migration {{(pid=69328) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11872}} [ 1437.133616] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager.update_available_resource {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1437.637359] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1437.637635] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1437.637812] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1437.637969] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69328) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1437.638911] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bad002ff-f27c-439c-be7d-46d81a0619f0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.647739] env[69328]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5408d0b0-19d0-4bac-a8a7-794361593da5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.661216] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f392a8e4-14b4-492e-ac32-4c56e5cdbca7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.667530] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4593948d-0237-4730-a0dd-6f38de105949 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.695991] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180708MB free_disk=116GB free_vcpus=48 pci_devices=None {{(pid=69328) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1437.695991] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1437.696115] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1438.721097] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Instance bb27c90e-af74-4bd4-9aa4-cc0d12e305e1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
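The inventory data and the per-instance allocation above make the resource-tracker figures easy to check by hand; a short worked sketch, assuming placement's usual capacity formula (total - reserved) * allocation_ratio:

    # Schedulable capacity per resource class, assuming
    # capacity = (total - reserved) * allocation_ratio.
    vcpu_capacity = (48 - 0) * 4.0          # 192 schedulable VCPUs
    ram_capacity = (196590 - 512) * 1.0     # 196078 MB schedulable RAM
    disk_capacity = (400 - 0) * 1.0         # 400 GB schedulable disk

    # The used_ram figure reported just below (704 MB) is the 512 MB host
    # reservation plus the 192 MB allocated to instance bb27c90e.
    assert 512 + 192 == 704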
{{(pid=69328) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1438.721337] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=69328) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1438.721458] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=69328) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1438.749308] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2b6a5e4-b0f3-4f66-8618-d2e2214ab673 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.756509] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ec0a3d1-3c3f-47a5-884c-b0929a6e521a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.785518] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b562d099-ad5a-4206-9083-0dad414eeb64 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.792305] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f153a80f-63ed-4b75-b579-a1459db14424 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.804897] env[69328]: DEBUG nova.compute.provider_tree [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1439.308257] env[69328]: DEBUG nova.scheduler.client.report [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1439.812714] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69328) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1439.813015] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.117s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1441.310419] env[69328]: DEBUG oslo_service.periodic_task [None 
req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1441.310803] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1441.310803] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1441.311049] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1441.311143] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69328) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1441.631893] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1441.631893] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1442.631528] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1444.631911] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1444.632273] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Cleaning up deleted instances {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11834}} [ 1445.141233] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] There are 28 instances to clean {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11843}} [ 1445.141470] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 7ee57873-8f9a-4bc6-9b88-261cd6239774] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1445.644400] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 1a0e084a-f7b2-4f2e-b508-33caeed2ffeb] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes 
/opt/stack/nova/nova/compute/manager.py:11847}} [ 1446.148331] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 55107d36-c16b-43f9-b436-0de8d9dfd0ca] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1446.651593] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 28d608b8-c06a-4e71-b3e2-94c63619cec0] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1447.155608] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: d1820cd5-bacb-4097-8d05-fffea8b64e2b] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1447.658996] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 566c3167-4cf2-4236-812f-dfbf30bbaf6f] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1448.162589] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: d19f6a2a-3a16-4031-8c20-143ccfd6f5f5] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1448.666303] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 0cf68559-5f07-4006-9f7f-59027e31635d] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1449.170662] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 53eb70f0-1734-4386-b747-014561ba577b] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1449.674293] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 0c83f194-9346-4e24-a0ea-815d0b454ded] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1450.177629] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 03f0adc8-d640-4248-be9d-ab4ba0cbe760] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1450.681364] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: ff815ffb-3422-469e-9b54-b33502826513] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1451.184983] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 79d66d5d-e1a4-4bc0-8e43-db97153867e3] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1451.688680] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: ae46c18e-15ae-4a47-b05a-a143f10b5ab6] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1452.192735] env[69328]: DEBUG nova.compute.manager [None 
req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 33583ef3-252c-45d4-a514-5646f98c5f45] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1452.696253] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: ee3609ea-0855-47c2-874c-349c80419781] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1453.199419] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 5f1d3c1a-a42a-4b9c-aee8-d3d8e2fc8732] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1453.702516] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: de8e6616-0460-4a6e-918c-a27818da96e2] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1454.206298] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: fb2d04d8-cff6-414c-9d50-3ab61729546d] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1454.709574] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: c1829dcf-3608-4955-bd50-eb9ee27d38e1] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1455.212562] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 275ef1ed-8e60-4151-b548-e22e5bd8efe2] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1455.716449] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: dc63e9d4-fe8b-4894-aac9-9c23f4b7fc34] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1456.220340] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 52c87371-4142-40d6-ac68-804aabd9f823] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1456.724231] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: 1413dcfe-3570-4657-b811-81a1acc159d1] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1457.228131] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: c751ef77-c3be-46cd-b7eb-fe139bf0998b] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1457.731430] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: f1be93b2-08db-41fe-87c4-f4e5f964cfa4] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1458.234973] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: a0b663eb-31b0-4de1-94bc-660a7d9c1c7b] Instance has had 0 of 5 cleanup 
attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1458.738525] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] [instance: b0a1441c-81e2-4131-a2ff-f5042d559d9f] Instance has had 0 of 5 cleanup attempts {{(pid=69328) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1459.242548] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1459.781576] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "bb27c90e-af74-4bd4-9aa4-cc0d12e305e1" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1459.781910] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "bb27c90e-af74-4bd4-9aa4-cc0d12e305e1" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1459.782118] env[69328]: INFO nova.compute.manager [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Shelving [ 1460.791877] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1460.792234] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-21bc6f75-4a00-463c-b13e-93fff3505bf5 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.799963] env[69328]: DEBUG oslo_vmware.api [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1460.799963] env[69328]: value = "task-3274473" [ 1460.799963] env[69328]: _type = "Task" [ 1460.799963] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.807875] env[69328]: DEBUG oslo_vmware.api [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274473, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.310618] env[69328]: DEBUG oslo_vmware.api [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274473, 'name': PowerOffVM_Task, 'duration_secs': 0.206688} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.310891] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1461.311656] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76ffdc7f-d32c-4ad9-84a6-65e21e1ec586 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.329906] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03ea687e-baf7-4bfd-bc75-a343ee8b5d8e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.839825] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Creating Snapshot of the VM instance {{(pid=69328) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1461.840168] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-866b18f4-417b-4d50-9c4c-eb0ddf934d22 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.848132] env[69328]: DEBUG oslo_vmware.api [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1461.848132] env[69328]: value = "task-3274474" [ 1461.848132] env[69328]: _type = "Task" [ 1461.848132] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.855778] env[69328]: DEBUG oslo_vmware.api [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274474, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.358898] env[69328]: DEBUG oslo_vmware.api [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274474, 'name': CreateSnapshot_Task, 'duration_secs': 0.414286} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.359175] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Created Snapshot of the VM instance {{(pid=69328) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1462.359909] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18b40c82-c828-4413-b5e1-a22c059226f2 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.877315] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Creating linked-clone VM from snapshot {{(pid=69328) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1462.877683] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-dbcb3ceb-643e-4d35-ac84-91d74ed79da1 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.886077] env[69328]: DEBUG oslo_vmware.api [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1462.886077] env[69328]: value = "task-3274475" [ 1462.886077] env[69328]: _type = "Task" [ 1462.886077] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.893721] env[69328]: DEBUG oslo_vmware.api [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274475, 'name': CloneVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.397063] env[69328]: DEBUG oslo_vmware.api [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274475, 'name': CloneVM_Task} progress is 94%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.896678] env[69328]: DEBUG oslo_vmware.api [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274475, 'name': CloneVM_Task, 'duration_secs': 0.940726} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.897035] env[69328]: INFO nova.virt.vmwareapi.vmops [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Created linked-clone VM from snapshot [ 1463.897726] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f29ce72-3f47-4d6a-9552-af168a10177a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.904642] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Uploading image 1b907037-24b5-4744-bd04-6dbc07359b5f {{(pid=69328) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1463.928674] env[69328]: DEBUG oslo_vmware.rw_handles [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1463.928674] env[69328]: value = "vm-653997" [ 1463.928674] env[69328]: _type = "VirtualMachine" [ 1463.928674] env[69328]: }. {{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1463.928935] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-cca72555-eafa-43ea-9465-aa7be08b492e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.936344] env[69328]: DEBUG oslo_vmware.rw_handles [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lease: (returnval){ [ 1463.936344] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c970c3-d52d-d0e5-aad4-057a2568cef5" [ 1463.936344] env[69328]: _type = "HttpNfcLease" [ 1463.936344] env[69328]: } obtained for exporting VM: (result){ [ 1463.936344] env[69328]: value = "vm-653997" [ 1463.936344] env[69328]: _type = "VirtualMachine" [ 1463.936344] env[69328]: }. {{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1463.936574] env[69328]: DEBUG oslo_vmware.api [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the lease: (returnval){ [ 1463.936574] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c970c3-d52d-d0e5-aad4-057a2568cef5" [ 1463.936574] env[69328]: _type = "HttpNfcLease" [ 1463.936574] env[69328]: } to be ready. {{(pid=69328) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1463.942851] env[69328]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1463.942851] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c970c3-d52d-d0e5-aad4-057a2568cef5" [ 1463.942851] env[69328]: _type = "HttpNfcLease" [ 1463.942851] env[69328]: } is initializing. 
{{(pid=69328) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1464.444798] env[69328]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1464.444798] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c970c3-d52d-d0e5-aad4-057a2568cef5" [ 1464.444798] env[69328]: _type = "HttpNfcLease" [ 1464.444798] env[69328]: } is ready. {{(pid=69328) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1464.445125] env[69328]: DEBUG oslo_vmware.rw_handles [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1464.445125] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52c970c3-d52d-d0e5-aad4-057a2568cef5" [ 1464.445125] env[69328]: _type = "HttpNfcLease" [ 1464.445125] env[69328]: }. {{(pid=69328) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1464.445801] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7936f4a6-428b-4339-ad9d-0787db6f8395 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.453538] env[69328]: DEBUG oslo_vmware.rw_handles [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52606ad2-6804-8552-b55d-b2f4996a27f3/disk-0.vmdk from lease info. {{(pid=69328) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1464.453732] env[69328]: DEBUG oslo_vmware.rw_handles [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52606ad2-6804-8552-b55d-b2f4996a27f3/disk-0.vmdk for reading. {{(pid=69328) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1464.539451] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-8101b287-0c9f-4994-9e22-587580f67b98 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.076299] env[69328]: DEBUG oslo_vmware.rw_handles [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52606ad2-6804-8552-b55d-b2f4996a27f3/disk-0.vmdk. {{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1472.077250] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c2c6fc6-b120-492e-b493-c4095c22af47 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.083510] env[69328]: DEBUG oslo_vmware.rw_handles [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52606ad2-6804-8552-b55d-b2f4996a27f3/disk-0.vmdk is in state: ready. 
{{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1472.083687] env[69328]: ERROR oslo_vmware.rw_handles [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52606ad2-6804-8552-b55d-b2f4996a27f3/disk-0.vmdk due to incomplete transfer. [ 1472.083917] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-38be8c77-009c-47c9-bfbe-718411f98cea {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.091938] env[69328]: DEBUG oslo_vmware.rw_handles [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52606ad2-6804-8552-b55d-b2f4996a27f3/disk-0.vmdk. {{(pid=69328) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1472.092154] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Uploaded image 1b907037-24b5-4744-bd04-6dbc07359b5f to the Glance image server {{(pid=69328) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1472.094260] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Destroying the VM {{(pid=69328) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1472.094481] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-896f9e0e-7fb5-4280-85bc-0cb99af5ce75 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.099414] env[69328]: DEBUG oslo_vmware.api [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1472.099414] env[69328]: value = "task-3274477" [ 1472.099414] env[69328]: _type = "Task" [ 1472.099414] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.106980] env[69328]: DEBUG oslo_vmware.api [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274477, 'name': Destroy_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.609404] env[69328]: DEBUG oslo_vmware.api [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274477, 'name': Destroy_Task, 'duration_secs': 0.337203} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1472.609684] env[69328]: INFO nova.virt.vmwareapi.vm_util [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Destroyed the VM [ 1472.609915] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Deleting Snapshot of the VM instance {{(pid=69328) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1472.610185] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-10bcf7b4-795c-4f73-81aa-6ef25d796f92 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.615527] env[69328]: DEBUG oslo_vmware.api [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1472.615527] env[69328]: value = "task-3274478" [ 1472.615527] env[69328]: _type = "Task" [ 1472.615527] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.622434] env[69328]: DEBUG oslo_vmware.api [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274478, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.124909] env[69328]: DEBUG oslo_vmware.api [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274478, 'name': RemoveSnapshot_Task, 'duration_secs': 0.344898} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1473.125309] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Deleted Snapshot of the VM instance {{(pid=69328) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1473.125446] env[69328]: DEBUG nova.compute.manager [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1473.126185] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d542522-2e4f-4b84-ab67-21b5e4e4d432 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.637532] env[69328]: INFO nova.compute.manager [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Shelve offloading [ 1474.141646] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1474.141963] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bf898da7-33b8-482d-bbeb-d67dff4fe613 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.149637] env[69328]: DEBUG oslo_vmware.api [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1474.149637] env[69328]: value = "task-3274479" [ 1474.149637] env[69328]: _type = "Task" [ 1474.149637] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.157206] env[69328]: DEBUG oslo_vmware.api [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274479, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.660478] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] VM already powered off {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1474.660702] env[69328]: DEBUG nova.compute.manager [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1474.661465] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d76ab431-f940-435d-a2d9-0bdf330d3ba6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.666813] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "refresh_cache-bb27c90e-af74-4bd4-9aa4-cc0d12e305e1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1474.666975] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquired lock "refresh_cache-bb27c90e-af74-4bd4-9aa4-cc0d12e305e1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1474.667157] env[69328]: DEBUG nova.network.neutron [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1475.363155] env[69328]: DEBUG nova.network.neutron [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Updating instance_info_cache with network_info: [{"id": "91d36c96-504d-42f3-b010-7ec883685b31", "address": "fa:16:3e:94:8e:92", "network": {"id": "238bfce5-9117-4d8e-8dd7-445d8d894599", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-231958017-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8bbb75992830459c85c818e850261c61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3d7e184-c87f-47a5-8d0d-9fa20e07e669", "external-id": "nsx-vlan-transportzone-746", "segmentation_id": 746, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap91d36c96-50", "ovs_interfaceid": "91d36c96-504d-42f3-b010-7ec883685b31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1475.865963] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Releasing lock "refresh_cache-bb27c90e-af74-4bd4-9aa4-cc0d12e305e1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1476.060678] env[69328]: DEBUG nova.compute.manager [req-92255623-15b4-4eaf-8856-79630c3d0618 req-0b36bbd5-ee4f-461d-af4e-dedc04939268 service nova] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Received event network-vif-unplugged-91d36c96-504d-42f3-b010-7ec883685b31 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1476.060867] env[69328]: DEBUG oslo_concurrency.lockutils [req-92255623-15b4-4eaf-8856-79630c3d0618 req-0b36bbd5-ee4f-461d-af4e-dedc04939268 service nova] Acquiring lock "bb27c90e-af74-4bd4-9aa4-cc0d12e305e1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1476.061166] env[69328]: DEBUG oslo_concurrency.lockutils [req-92255623-15b4-4eaf-8856-79630c3d0618 req-0b36bbd5-ee4f-461d-af4e-dedc04939268 service nova] Lock "bb27c90e-af74-4bd4-9aa4-cc0d12e305e1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1476.061252] env[69328]: DEBUG oslo_concurrency.lockutils [req-92255623-15b4-4eaf-8856-79630c3d0618 req-0b36bbd5-ee4f-461d-af4e-dedc04939268 service nova] Lock "bb27c90e-af74-4bd4-9aa4-cc0d12e305e1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1476.061417] env[69328]: DEBUG nova.compute.manager [req-92255623-15b4-4eaf-8856-79630c3d0618 req-0b36bbd5-ee4f-461d-af4e-dedc04939268 service nova] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] No waiting events found dispatching network-vif-unplugged-91d36c96-504d-42f3-b010-7ec883685b31 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1476.061582] env[69328]: WARNING nova.compute.manager [req-92255623-15b4-4eaf-8856-79630c3d0618 req-0b36bbd5-ee4f-461d-af4e-dedc04939268 service nova] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Received unexpected event network-vif-unplugged-91d36c96-504d-42f3-b010-7ec883685b31 for instance with vm_state shelved and task_state shelving_offloading. 
[ 1476.152519] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1476.153459] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75301907-f7aa-4485-b853-29613676f020 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.160897] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1476.161134] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0a17edd8-525c-48a3-8b0b-b8233f28791d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.223939] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1476.224190] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1476.224348] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Deleting the datastore file [datastore1] bb27c90e-af74-4bd4-9aa4-cc0d12e305e1 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1476.224603] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-166a2288-9de9-46a8-835d-78264083cd3a {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.230858] env[69328]: DEBUG oslo_vmware.api [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1476.230858] env[69328]: value = "task-3274481" [ 1476.230858] env[69328]: _type = "Task" [ 1476.230858] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.238307] env[69328]: DEBUG oslo_vmware.api [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274481, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.740249] env[69328]: DEBUG oslo_vmware.api [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274481, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148532} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1476.740592] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1476.740699] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1476.740847] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1476.828892] env[69328]: INFO nova.scheduler.client.report [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Deleted allocations for instance bb27c90e-af74-4bd4-9aa4-cc0d12e305e1 [ 1477.333938] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1477.334266] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1477.334505] env[69328]: DEBUG nova.objects.instance [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lazy-loading 'resources' on Instance uuid bb27c90e-af74-4bd4-9aa4-cc0d12e305e1 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1477.838032] env[69328]: DEBUG nova.objects.instance [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lazy-loading 'numa_topology' on Instance uuid bb27c90e-af74-4bd4-9aa4-cc0d12e305e1 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1478.091696] env[69328]: DEBUG nova.compute.manager [req-59e20462-3224-4e53-b1cc-56712289de94 req-58b7f2f1-6312-4894-b774-e102309ed95b 
service nova] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Received event network-changed-91d36c96-504d-42f3-b010-7ec883685b31 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1478.091938] env[69328]: DEBUG nova.compute.manager [req-59e20462-3224-4e53-b1cc-56712289de94 req-58b7f2f1-6312-4894-b774-e102309ed95b service nova] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Refreshing instance network info cache due to event network-changed-91d36c96-504d-42f3-b010-7ec883685b31. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1478.092125] env[69328]: DEBUG oslo_concurrency.lockutils [req-59e20462-3224-4e53-b1cc-56712289de94 req-58b7f2f1-6312-4894-b774-e102309ed95b service nova] Acquiring lock "refresh_cache-bb27c90e-af74-4bd4-9aa4-cc0d12e305e1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1478.092272] env[69328]: DEBUG oslo_concurrency.lockutils [req-59e20462-3224-4e53-b1cc-56712289de94 req-58b7f2f1-6312-4894-b774-e102309ed95b service nova] Acquired lock "refresh_cache-bb27c90e-af74-4bd4-9aa4-cc0d12e305e1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1478.092431] env[69328]: DEBUG nova.network.neutron [req-59e20462-3224-4e53-b1cc-56712289de94 req-58b7f2f1-6312-4894-b774-e102309ed95b service nova] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Refreshing network info cache for port 91d36c96-504d-42f3-b010-7ec883685b31 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1478.340480] env[69328]: DEBUG nova.objects.base [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=69328) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1478.367653] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d42c2202-5397-4314-9d1e-520ac94e32b7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.375349] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80fd862f-007d-4f09-80e6-b71066ebd03e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.405349] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95abc352-888c-4366-9c70-8baa10f6e2bb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.412126] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84f187dd-dac4-46c2-9a89-31707182e04d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.424880] env[69328]: DEBUG nova.compute.provider_tree [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1478.690550] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 
tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "bb27c90e-af74-4bd4-9aa4-cc0d12e305e1" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1478.806608] env[69328]: DEBUG nova.network.neutron [req-59e20462-3224-4e53-b1cc-56712289de94 req-58b7f2f1-6312-4894-b774-e102309ed95b service nova] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Updated VIF entry in instance network info cache for port 91d36c96-504d-42f3-b010-7ec883685b31. {{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1478.806971] env[69328]: DEBUG nova.network.neutron [req-59e20462-3224-4e53-b1cc-56712289de94 req-58b7f2f1-6312-4894-b774-e102309ed95b service nova] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Updating instance_info_cache with network_info: [{"id": "91d36c96-504d-42f3-b010-7ec883685b31", "address": "fa:16:3e:94:8e:92", "network": {"id": "238bfce5-9117-4d8e-8dd7-445d8d894599", "bridge": null, "label": "tempest-ServerActionsTestOtherB-231958017-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8bbb75992830459c85c818e850261c61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap91d36c96-50", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1478.927702] env[69328]: DEBUG nova.scheduler.client.report [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1479.310357] env[69328]: DEBUG oslo_concurrency.lockutils [req-59e20462-3224-4e53-b1cc-56712289de94 req-58b7f2f1-6312-4894-b774-e102309ed95b service nova] Releasing lock "refresh_cache-bb27c90e-af74-4bd4-9aa4-cc0d12e305e1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1479.432649] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.098s {{(pid=69328) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1479.940401] env[69328]: DEBUG oslo_concurrency.lockutils [None req-1ad97871-14db-4ea3-aefe-126a1fcf4e94 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "bb27c90e-af74-4bd4-9aa4-cc0d12e305e1" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 20.158s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1479.941295] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "bb27c90e-af74-4bd4-9aa4-cc0d12e305e1" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.251s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1479.941470] env[69328]: INFO nova.compute.manager [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Unshelving [ 1480.965437] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1480.965719] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1480.965956] env[69328]: DEBUG nova.objects.instance [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lazy-loading 'pci_requests' on Instance uuid bb27c90e-af74-4bd4-9aa4-cc0d12e305e1 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1481.471042] env[69328]: DEBUG nova.objects.instance [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lazy-loading 'numa_topology' on Instance uuid bb27c90e-af74-4bd4-9aa4-cc0d12e305e1 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1481.974089] env[69328]: INFO nova.compute.claims [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1483.009428] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-403d7350-62d8-4d7d-8704-46b00e9f2874 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.016980] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-29417a89-167f-425b-890d-878333bd6f87 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.045843] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbf431df-f554-46f5-8c5d-b6d6094f54fd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.052694] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6647194-711f-4416-ac30-cc1fb9d4081c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.066052] env[69328]: DEBUG nova.compute.provider_tree [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1483.569270] env[69328]: DEBUG nova.scheduler.client.report [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1484.074663] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.109s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1484.103518] env[69328]: INFO nova.network.neutron [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Updating port 91d36c96-504d-42f3-b010-7ec883685b31 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1485.497946] env[69328]: DEBUG nova.compute.manager [req-3c8ee3ca-40b6-4e85-8338-a51640b94aa0 req-1d34d5c1-063a-4069-98fd-29a597b54971 service nova] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Received event network-vif-plugged-91d36c96-504d-42f3-b010-7ec883685b31 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1485.498231] env[69328]: DEBUG oslo_concurrency.lockutils [req-3c8ee3ca-40b6-4e85-8338-a51640b94aa0 req-1d34d5c1-063a-4069-98fd-29a597b54971 service nova] Acquiring lock "bb27c90e-af74-4bd4-9aa4-cc0d12e305e1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1485.498395] env[69328]: DEBUG oslo_concurrency.lockutils [req-3c8ee3ca-40b6-4e85-8338-a51640b94aa0 
req-1d34d5c1-063a-4069-98fd-29a597b54971 service nova] Lock "bb27c90e-af74-4bd4-9aa4-cc0d12e305e1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1485.498557] env[69328]: DEBUG oslo_concurrency.lockutils [req-3c8ee3ca-40b6-4e85-8338-a51640b94aa0 req-1d34d5c1-063a-4069-98fd-29a597b54971 service nova] Lock "bb27c90e-af74-4bd4-9aa4-cc0d12e305e1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1485.498718] env[69328]: DEBUG nova.compute.manager [req-3c8ee3ca-40b6-4e85-8338-a51640b94aa0 req-1d34d5c1-063a-4069-98fd-29a597b54971 service nova] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] No waiting events found dispatching network-vif-plugged-91d36c96-504d-42f3-b010-7ec883685b31 {{(pid=69328) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1485.498895] env[69328]: WARNING nova.compute.manager [req-3c8ee3ca-40b6-4e85-8338-a51640b94aa0 req-1d34d5c1-063a-4069-98fd-29a597b54971 service nova] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Received unexpected event network-vif-plugged-91d36c96-504d-42f3-b010-7ec883685b31 for instance with vm_state shelved_offloaded and task_state spawning. [ 1485.573689] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "refresh_cache-bb27c90e-af74-4bd4-9aa4-cc0d12e305e1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1485.573865] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquired lock "refresh_cache-bb27c90e-af74-4bd4-9aa4-cc0d12e305e1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1485.574109] env[69328]: DEBUG nova.network.neutron [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Building network info cache for instance {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1486.307986] env[69328]: DEBUG nova.network.neutron [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Updating instance_info_cache with network_info: [{"id": "91d36c96-504d-42f3-b010-7ec883685b31", "address": "fa:16:3e:94:8e:92", "network": {"id": "238bfce5-9117-4d8e-8dd7-445d8d894599", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-231958017-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, 
"tenant_id": "8bbb75992830459c85c818e850261c61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3d7e184-c87f-47a5-8d0d-9fa20e07e669", "external-id": "nsx-vlan-transportzone-746", "segmentation_id": 746, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91d36c96-50", "ovs_interfaceid": "91d36c96-504d-42f3-b010-7ec883685b31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1486.811554] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Releasing lock "refresh_cache-bb27c90e-af74-4bd4-9aa4-cc0d12e305e1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1486.839199] env[69328]: DEBUG nova.virt.hardware [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-03T17:33:57Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='f8a6114477e6005305059731f9e04b05',container_format='bare',created_at=2025-04-03T17:50:18Z,direct_url=,disk_format='vmdk',id=1b907037-24b5-4744-bd04-6dbc07359b5f,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-803618932-shelved',owner='8bbb75992830459c85c818e850261c61',properties=ImageMetaProps,protected=,size=31666688,status='active',tags=,updated_at=2025-04-03T17:50:30Z,virtual_size=,visibility=), allow threads: False {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1486.839487] env[69328]: DEBUG nova.virt.hardware [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Flavor limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1486.839648] env[69328]: DEBUG nova.virt.hardware [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Image limits 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1486.839872] env[69328]: DEBUG nova.virt.hardware [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Flavor pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1486.840049] env[69328]: DEBUG nova.virt.hardware [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Image pref 0:0:0 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1486.840211] env[69328]: DEBUG nova.virt.hardware [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 
tempest-ServerActionsTestOtherB-2041880398-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69328) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1486.840436] env[69328]: DEBUG nova.virt.hardware [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1486.840599] env[69328]: DEBUG nova.virt.hardware [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1486.840766] env[69328]: DEBUG nova.virt.hardware [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Got 1 possible topologies {{(pid=69328) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1486.840946] env[69328]: DEBUG nova.virt.hardware [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1486.841125] env[69328]: DEBUG nova.virt.hardware [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69328) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1486.841967] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c3fae34-bb9c-4bc0-9921-0c42889f289f {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.850207] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb2ef98f-223a-4a66-a009-7922cb37cb84 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.863315] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:94:8e:92', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f3d7e184-c87f-47a5-8d0d-9fa20e07e669', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '91d36c96-504d-42f3-b010-7ec883685b31', 'vif_model': 'vmxnet3'}] {{(pid=69328) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1486.870510] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1486.870725] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Creating VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1486.870921] env[69328]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a1b8232a-e63c-4c1d-92bb-d5aef54e3149 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.889651] env[69328]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1486.889651] env[69328]: value = "task-3274482" [ 1486.889651] env[69328]: _type = "Task" [ 1486.889651] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.896493] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274482, 'name': CreateVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.399869] env[69328]: DEBUG oslo_vmware.api [-] Task: {'id': task-3274482, 'name': CreateVM_Task, 'duration_secs': 0.294976} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.400056] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Created VM on the ESX host {{(pid=69328) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1487.400663] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1b907037-24b5-4744-bd04-6dbc07359b5f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1487.400827] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1b907037-24b5-4744-bd04-6dbc07359b5f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1487.401238] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1b907037-24b5-4744-bd04-6dbc07359b5f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1487.401486] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e486fe6-eabd-43f6-9bf9-da55e34674f0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.406116] env[69328]: DEBUG oslo_vmware.api [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1487.406116] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52603e1b-5c90-720d-db71-8aeaf36c55e2" [ 1487.406116] env[69328]: _type = "Task" [ 1487.406116] env[69328]: } to 
complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.413419] env[69328]: DEBUG oslo_vmware.api [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]52603e1b-5c90-720d-db71-8aeaf36c55e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.527912] env[69328]: DEBUG nova.compute.manager [req-38a124c5-c0e0-4de6-a8fe-3c2818be0296 req-ef9228d9-ead9-420b-88f8-b0fba73d5331 service nova] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Received event network-changed-91d36c96-504d-42f3-b010-7ec883685b31 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1487.528137] env[69328]: DEBUG nova.compute.manager [req-38a124c5-c0e0-4de6-a8fe-3c2818be0296 req-ef9228d9-ead9-420b-88f8-b0fba73d5331 service nova] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Refreshing instance network info cache due to event network-changed-91d36c96-504d-42f3-b010-7ec883685b31. {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1487.528356] env[69328]: DEBUG oslo_concurrency.lockutils [req-38a124c5-c0e0-4de6-a8fe-3c2818be0296 req-ef9228d9-ead9-420b-88f8-b0fba73d5331 service nova] Acquiring lock "refresh_cache-bb27c90e-af74-4bd4-9aa4-cc0d12e305e1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1487.528500] env[69328]: DEBUG oslo_concurrency.lockutils [req-38a124c5-c0e0-4de6-a8fe-3c2818be0296 req-ef9228d9-ead9-420b-88f8-b0fba73d5331 service nova] Acquired lock "refresh_cache-bb27c90e-af74-4bd4-9aa4-cc0d12e305e1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1487.528658] env[69328]: DEBUG nova.network.neutron [req-38a124c5-c0e0-4de6-a8fe-3c2818be0296 req-ef9228d9-ead9-420b-88f8-b0fba73d5331 service nova] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Refreshing network info cache for port 91d36c96-504d-42f3-b010-7ec883685b31 {{(pid=69328) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1487.916643] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1b907037-24b5-4744-bd04-6dbc07359b5f" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1487.916984] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Processing image 1b907037-24b5-4744-bd04-6dbc07359b5f {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1487.917135] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1b907037-24b5-4744-bd04-6dbc07359b5f/1b907037-24b5-4744-bd04-6dbc07359b5f.vmdk" {{(pid=69328) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1487.917286] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1b907037-24b5-4744-bd04-6dbc07359b5f/1b907037-24b5-4744-bd04-6dbc07359b5f.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1487.917467] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1487.917706] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-39c03c00-e51b-4c31-9cef-cd76df459bbb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.926915] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1487.927095] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69328) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1487.927738] env[69328]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dca714ed-6842-4f5a-9c46-cc29230db9ee {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.932375] env[69328]: DEBUG oslo_vmware.api [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1487.932375] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]5290ebd4-41c5-e4d7-d68a-e5b96ebe6619" [ 1487.932375] env[69328]: _type = "Task" [ 1487.932375] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.939581] env[69328]: DEBUG oslo_vmware.api [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': session[527fc292-6741-f48d-313f-2d0c02ad0f69]5290ebd4-41c5-e4d7-d68a-e5b96ebe6619, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.206728] env[69328]: DEBUG nova.network.neutron [req-38a124c5-c0e0-4de6-a8fe-3c2818be0296 req-ef9228d9-ead9-420b-88f8-b0fba73d5331 service nova] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Updated VIF entry in instance network info cache for port 91d36c96-504d-42f3-b010-7ec883685b31. 
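The lockutils lines above serialise access to the cached image: the request that holds the "[datastore1] devstack-image-cache_base/..." lock checks the datastore (SearchDatastore_Task) and fetches the image if it is missing, while concurrent builds of the same image wait and then reuse the cached VMDK. A hedged sketch of that pattern; the helper names (get_or_fetch_cached_image, exists_fn, fetch_fn) are illustrative, not Nova's:

    # Sketch of the image-cache locking pattern seen above. Assumes only that
    # oslo_concurrency.lockutils.lock() is available; everything else is a
    # placeholder for the datastore search / fetch steps in the log.
    from oslo_concurrency import lockutils

    def get_or_fetch_cached_image(cache_vmdk_path, exists_fn, fetch_fn):
        with lockutils.lock(cache_vmdk_path):
            if not exists_fn(cache_vmdk_path):   # SearchDatastore_Task in the log
                fetch_fn(cache_vmdk_path)        # download and move into the cache
        return cache_vmdk_path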
{{(pid=69328) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1488.207157] env[69328]: DEBUG nova.network.neutron [req-38a124c5-c0e0-4de6-a8fe-3c2818be0296 req-ef9228d9-ead9-420b-88f8-b0fba73d5331 service nova] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Updating instance_info_cache with network_info: [{"id": "91d36c96-504d-42f3-b010-7ec883685b31", "address": "fa:16:3e:94:8e:92", "network": {"id": "238bfce5-9117-4d8e-8dd7-445d8d894599", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-231958017-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8bbb75992830459c85c818e850261c61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3d7e184-c87f-47a5-8d0d-9fa20e07e669", "external-id": "nsx-vlan-transportzone-746", "segmentation_id": 746, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91d36c96-50", "ovs_interfaceid": "91d36c96-504d-42f3-b010-7ec883685b31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1488.442518] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Preparing fetch location {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1488.442721] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Fetch image to [datastore1] OSTACK_IMG_b0948b78-b4d3-4af1-8c29-9a85d25c3524/OSTACK_IMG_b0948b78-b4d3-4af1-8c29-9a85d25c3524.vmdk {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1488.442906] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Downloading stream optimized image 1b907037-24b5-4744-bd04-6dbc07359b5f to [datastore1] OSTACK_IMG_b0948b78-b4d3-4af1-8c29-9a85d25c3524/OSTACK_IMG_b0948b78-b4d3-4af1-8c29-9a85d25c3524.vmdk on the data store datastore1 as vApp {{(pid=69328) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1488.443089] env[69328]: DEBUG nova.virt.vmwareapi.images [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Downloading image file data 1b907037-24b5-4744-bd04-6dbc07359b5f to the ESX as VM named 'OSTACK_IMG_b0948b78-b4d3-4af1-8c29-9a85d25c3524' {{(pid=69328) fetch_image_stream_optimized 
/opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1488.506586] env[69328]: DEBUG oslo_vmware.rw_handles [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1488.506586] env[69328]: value = "resgroup-9" [ 1488.506586] env[69328]: _type = "ResourcePool" [ 1488.506586] env[69328]: }. {{(pid=69328) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1488.506876] env[69328]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-fbafe228-1f75-4303-bb95-6ba7058f48bb {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.527541] env[69328]: DEBUG oslo_vmware.rw_handles [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lease: (returnval){ [ 1488.527541] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d3fc6b-b65c-1fb0-e8fd-7d0ee3de0a0c" [ 1488.527541] env[69328]: _type = "HttpNfcLease" [ 1488.527541] env[69328]: } obtained for vApp import into resource pool (val){ [ 1488.527541] env[69328]: value = "resgroup-9" [ 1488.527541] env[69328]: _type = "ResourcePool" [ 1488.527541] env[69328]: }. {{(pid=69328) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1488.527857] env[69328]: DEBUG oslo_vmware.api [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the lease: (returnval){ [ 1488.527857] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d3fc6b-b65c-1fb0-e8fd-7d0ee3de0a0c" [ 1488.527857] env[69328]: _type = "HttpNfcLease" [ 1488.527857] env[69328]: } to be ready. {{(pid=69328) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1488.533730] env[69328]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1488.533730] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d3fc6b-b65c-1fb0-e8fd-7d0ee3de0a0c" [ 1488.533730] env[69328]: _type = "HttpNfcLease" [ 1488.533730] env[69328]: } is initializing. {{(pid=69328) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1488.710311] env[69328]: DEBUG oslo_concurrency.lockutils [req-38a124c5-c0e0-4de6-a8fe-3c2818be0296 req-ef9228d9-ead9-420b-88f8-b0fba73d5331 service nova] Releasing lock "refresh_cache-bb27c90e-af74-4bd4-9aa4-cc0d12e305e1" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1489.035519] env[69328]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1489.035519] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d3fc6b-b65c-1fb0-e8fd-7d0ee3de0a0c" [ 1489.035519] env[69328]: _type = "HttpNfcLease" [ 1489.035519] env[69328]: } is ready. 
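The rw_handles lines above show the stream-optimized import: ImportVApp on the resource pool returns an HttpNfcLease, the lease is polled until ready, the disk-0.vmdk URL is read from the lease info, the image bytes are written to that URL, and the lease is progressed and completed. A rough outline of that sequence against an oslo_vmware VMwareAPISession, with the import-spec construction and the HTTP upload elided and argument lists simplified (treat it as a sketch, not the library's exact API surface):

    # Outline of the HttpNfcLease flow in the log above. `session` is assumed
    # to be an oslo_vmware.api.VMwareAPISession; `write_bytes` stands in for
    # the HTTP write handle that streams the VMDK data.
    from oslo_vmware import vim_util

    def import_vapp_and_upload(session, rp_ref, folder_ref, import_spec,
                               write_bytes):
        lease = session.invoke_api(session.vim, 'ImportVApp', rp_ref,
                                   spec=import_spec, folder=folder_ref)
        session.wait_for_lease_ready(lease)              # "Lease ... is ready."
        lease_info = session.invoke_api(vim_util, 'get_object_property',
                                        session.vim, lease, 'info')
        url = lease_info.deviceUrl[0].url                # the disk-0.vmdk URL
        write_bytes(url)                                 # stream the image data
        session.invoke_api(session.vim, 'HttpNfcLeaseProgress', lease,
                           percent=100)
        session.invoke_api(session.vim, 'HttpNfcLeaseComplete', lease)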
{{(pid=69328) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1489.035967] env[69328]: DEBUG oslo_vmware.rw_handles [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1489.035967] env[69328]: value = "session[527fc292-6741-f48d-313f-2d0c02ad0f69]52d3fc6b-b65c-1fb0-e8fd-7d0ee3de0a0c" [ 1489.035967] env[69328]: _type = "HttpNfcLease" [ 1489.035967] env[69328]: }. {{(pid=69328) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1489.036537] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e02888bd-cb66-4474-bed7-83418da54850 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.043599] env[69328]: DEBUG oslo_vmware.rw_handles [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5253509b-fa29-0de6-8f37-8f93efaa6f4f/disk-0.vmdk from lease info. {{(pid=69328) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1489.043774] env[69328]: DEBUG oslo_vmware.rw_handles [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Creating HTTP connection to write to file with size = 31666688 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5253509b-fa29-0de6-8f37-8f93efaa6f4f/disk-0.vmdk. {{(pid=69328) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1489.105305] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-3f8944a5-4779-44f5-b059-99008e3ffe70 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.148978] env[69328]: DEBUG oslo_vmware.rw_handles [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Completed reading data from the image iterator. {{(pid=69328) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1490.149338] env[69328]: DEBUG oslo_vmware.rw_handles [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5253509b-fa29-0de6-8f37-8f93efaa6f4f/disk-0.vmdk. 
{{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1490.150080] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d4fcb12-1570-406c-9995-55b3e4e50f3e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.156592] env[69328]: DEBUG oslo_vmware.rw_handles [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5253509b-fa29-0de6-8f37-8f93efaa6f4f/disk-0.vmdk is in state: ready. {{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1490.156755] env[69328]: DEBUG oslo_vmware.rw_handles [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5253509b-fa29-0de6-8f37-8f93efaa6f4f/disk-0.vmdk. {{(pid=69328) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1490.156974] env[69328]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-4c0c7d49-43cb-4e82-945d-677146a8f4fd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.340317] env[69328]: DEBUG oslo_vmware.rw_handles [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5253509b-fa29-0de6-8f37-8f93efaa6f4f/disk-0.vmdk. 
{{(pid=69328) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1490.340571] env[69328]: INFO nova.virt.vmwareapi.images [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Downloaded image file data 1b907037-24b5-4744-bd04-6dbc07359b5f [ 1490.341508] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2175de6-293f-4d88-8078-e0e6440e4ce6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.356806] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b6dfdad9-dd7d-4ad8-b34a-2e797dabc106 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.385340] env[69328]: INFO nova.virt.vmwareapi.images [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] The imported VM was unregistered [ 1490.387500] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Caching image {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1490.387734] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Creating directory with path [datastore1] devstack-image-cache_base/1b907037-24b5-4744-bd04-6dbc07359b5f {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1490.388043] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5cfea095-117a-4654-9604-ace483b97957 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.397385] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Created directory with path [datastore1] devstack-image-cache_base/1b907037-24b5-4744-bd04-6dbc07359b5f {{(pid=69328) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1490.397563] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_b0948b78-b4d3-4af1-8c29-9a85d25c3524/OSTACK_IMG_b0948b78-b4d3-4af1-8c29-9a85d25c3524.vmdk to [datastore1] devstack-image-cache_base/1b907037-24b5-4744-bd04-6dbc07359b5f/1b907037-24b5-4744-bd04-6dbc07359b5f.vmdk. 
{{(pid=69328) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1490.397782] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-5cc75a03-5073-4111-9fa7-8aa63c832608 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.404125] env[69328]: DEBUG oslo_vmware.api [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1490.404125] env[69328]: value = "task-3274485" [ 1490.404125] env[69328]: _type = "Task" [ 1490.404125] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.411199] env[69328]: DEBUG oslo_vmware.api [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274485, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.913975] env[69328]: DEBUG oslo_vmware.api [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274485, 'name': MoveVirtualDisk_Task} progress is 26%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.415415] env[69328]: DEBUG oslo_vmware.api [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274485, 'name': MoveVirtualDisk_Task} progress is 49%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.915810] env[69328]: DEBUG oslo_vmware.api [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274485, 'name': MoveVirtualDisk_Task} progress is 71%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.418502] env[69328]: DEBUG oslo_vmware.api [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274485, 'name': MoveVirtualDisk_Task} progress is 97%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.917688] env[69328]: DEBUG oslo_vmware.api [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274485, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.312237} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.917979] env[69328]: INFO nova.virt.vmwareapi.ds_util [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_b0948b78-b4d3-4af1-8c29-9a85d25c3524/OSTACK_IMG_b0948b78-b4d3-4af1-8c29-9a85d25c3524.vmdk to [datastore1] devstack-image-cache_base/1b907037-24b5-4744-bd04-6dbc07359b5f/1b907037-24b5-4744-bd04-6dbc07359b5f.vmdk. 
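The MoveVirtualDisk_Task just completed above promotes the imported OSTACK_IMG_* VMDK into devstack-image-cache_base, and the repeated "progress is N%" lines are oslo.vmware polling that task. A hedged sketch of how such a call is typically driven (datacenter arguments and error handling are omitted and paths are shortened):

    # Sketch of a VirtualDiskManager call driven through oslo.vmware, matching
    # the move into the image cache above. `session` is assumed to be an
    # oslo_vmware.api.VMwareAPISession; wait_for_task() is what produces the
    # "progress is N%" polling seen in the log.
    def move_into_image_cache(session, src_ds_path, dst_ds_path):
        disk_mgr = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(session.vim, 'MoveVirtualDisk_Task', disk_mgr,
                                  sourceName=src_ds_path, destName=dst_ds_path)
        session.wait_for_task(task)   # raises if the task ends in error

    # e.g. move_into_image_cache(session,
    #          '[datastore1] OSTACK_IMG_<uuid>/OSTACK_IMG_<uuid>.vmdk',
    #          '[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk')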
[ 1492.918193] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Cleaning up location [datastore1] OSTACK_IMG_b0948b78-b4d3-4af1-8c29-9a85d25c3524 {{(pid=69328) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1492.918360] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_b0948b78-b4d3-4af1-8c29-9a85d25c3524 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1492.918609] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6cc384fb-d95a-48e1-abe4-5b9d3503f53d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.924631] env[69328]: DEBUG oslo_vmware.api [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1492.924631] env[69328]: value = "task-3274486" [ 1492.924631] env[69328]: _type = "Task" [ 1492.924631] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.931561] env[69328]: DEBUG oslo_vmware.api [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274486, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.435604] env[69328]: DEBUG oslo_vmware.api [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274486, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.034317} completed successfully. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1493.435907] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1493.436020] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1b907037-24b5-4744-bd04-6dbc07359b5f/1b907037-24b5-4744-bd04-6dbc07359b5f.vmdk" {{(pid=69328) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1493.436282] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1b907037-24b5-4744-bd04-6dbc07359b5f/1b907037-24b5-4744-bd04-6dbc07359b5f.vmdk to [datastore1] bb27c90e-af74-4bd4-9aa4-cc0d12e305e1/bb27c90e-af74-4bd4-9aa4-cc0d12e305e1.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1493.436528] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d568550c-ebc8-4056-ba78-2a71ce9157d7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.443111] env[69328]: DEBUG oslo_vmware.api [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1493.443111] env[69328]: value = "task-3274487" [ 1493.443111] env[69328]: _type = "Task" [ 1493.443111] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.450297] env[69328]: DEBUG oslo_vmware.api [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274487, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.953426] env[69328]: DEBUG oslo_vmware.api [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274487, 'name': CopyVirtualDisk_Task} progress is 24%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.454249] env[69328]: DEBUG oslo_vmware.api [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274487, 'name': CopyVirtualDisk_Task} progress is 49%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.955447] env[69328]: DEBUG oslo_vmware.api [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274487, 'name': CopyVirtualDisk_Task} progress is 74%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.456785] env[69328]: DEBUG oslo_vmware.api [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274487, 'name': CopyVirtualDisk_Task} progress is 97%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.957119] env[69328]: DEBUG oslo_vmware.api [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274487, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.046276} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.957396] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1b907037-24b5-4744-bd04-6dbc07359b5f/1b907037-24b5-4744-bd04-6dbc07359b5f.vmdk to [datastore1] bb27c90e-af74-4bd4-9aa4-cc0d12e305e1/bb27c90e-af74-4bd4-9aa4-cc0d12e305e1.vmdk {{(pid=69328) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1495.958230] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65685c54-46ae-4f8b-892b-06ef147bc3dc {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.980147] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Reconfiguring VM instance instance-0000007f to attach disk [datastore1] bb27c90e-af74-4bd4-9aa4-cc0d12e305e1/bb27c90e-af74-4bd4-9aa4-cc0d12e305e1.vmdk or device None with type streamOptimized {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1495.980438] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c74c4da0-f221-48cd-a542-14e591c7195b {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.999446] env[69328]: DEBUG oslo_vmware.api [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1495.999446] env[69328]: value = "task-3274488" [ 1495.999446] env[69328]: _type = "Task" [ 1495.999446] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.007405] env[69328]: DEBUG oslo_vmware.api [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274488, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.509225] env[69328]: DEBUG oslo_vmware.api [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274488, 'name': ReconfigVM_Task, 'duration_secs': 0.288547} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.509570] env[69328]: DEBUG nova.virt.vmwareapi.volumeops [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Reconfigured VM instance instance-0000007f to attach disk [datastore1] bb27c90e-af74-4bd4-9aa4-cc0d12e305e1/bb27c90e-af74-4bd4-9aa4-cc0d12e305e1.vmdk or device None with type streamOptimized {{(pid=69328) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1496.510150] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9c7ef2b4-42b2-4222-b853-efe5a7796c34 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.516347] env[69328]: DEBUG oslo_vmware.api [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1496.516347] env[69328]: value = "task-3274489" [ 1496.516347] env[69328]: _type = "Task" [ 1496.516347] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.523647] env[69328]: DEBUG oslo_vmware.api [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274489, 'name': Rename_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.026169] env[69328]: DEBUG oslo_vmware.api [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274489, 'name': Rename_Task, 'duration_secs': 0.14733} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.026491] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Powering on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1497.026735] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4d4b69cc-5a2c-48ec-a4df-f97f727b6859 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.033693] env[69328]: DEBUG oslo_vmware.api [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1497.033693] env[69328]: value = "task-3274490" [ 1497.033693] env[69328]: _type = "Task" [ 1497.033693] env[69328]: } to complete. 
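The ReconfigVM_Task above attaches the copied per-instance VMDK to the VM before it is renamed and powered on. A hedged outline of what that reconfigure looks like through the vSphere SDK object factory; controller keys, unit numbers and the streamOptimized backing details the log mentions are deliberately omitted, so this is a shape sketch rather than Nova's attach_disk_to_vm:

    # Shape of a ReconfigVM_Task disk attach, heavily simplified. A real spec
    # also needs controllerKey/unitNumber and backing details; see the
    # omissions noted above. `session` is an oslo_vmware.api.VMwareAPISession.
    def attach_vmdk(session, vm_ref, vmdk_path):
        cf = session.vim.client.factory
        backing = cf.create('ns0:VirtualDiskFlatVer2BackingInfo')
        backing.fileName = vmdk_path         # "[datastore1] <uuid>/<uuid>.vmdk"
        disk = cf.create('ns0:VirtualDisk')
        disk.backing = backing
        dev_spec = cf.create('ns0:VirtualDeviceConfigSpec')
        dev_spec.operation = 'add'
        dev_spec.device = disk
        config_spec = cf.create('ns0:VirtualMachineConfigSpec')
        config_spec.deviceChange = [dev_spec]
        task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref,
                                  spec=config_spec)
        session.wait_for_task(task)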
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.041715] env[69328]: DEBUG oslo_vmware.api [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274490, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.544678] env[69328]: DEBUG oslo_vmware.api [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274490, 'name': PowerOnVM_Task, 'duration_secs': 0.442766} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.544984] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Powered on the VM {{(pid=69328) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1497.636202] env[69328]: DEBUG nova.compute.manager [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Checking state {{(pid=69328) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1497.637101] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22f0e004-c270-4110-b634-a685ae2f5984 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.153614] env[69328]: DEBUG oslo_concurrency.lockutils [None req-bef7b103-123e-4e48-b39d-3dd7dbe8b2b3 tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "bb27c90e-af74-4bd4-9aa4-cc0d12e305e1" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 18.212s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1499.486453] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5c3c1168-2b6f-4ba6-a4af-8e15cbb5d64a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "bb27c90e-af74-4bd4-9aa4-cc0d12e305e1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1499.486794] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5c3c1168-2b6f-4ba6-a4af-8e15cbb5d64a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "bb27c90e-af74-4bd4-9aa4-cc0d12e305e1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1499.486926] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5c3c1168-2b6f-4ba6-a4af-8e15cbb5d64a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "bb27c90e-af74-4bd4-9aa4-cc0d12e305e1-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1499.487507] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5c3c1168-2b6f-4ba6-a4af-8e15cbb5d64a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "bb27c90e-af74-4bd4-9aa4-cc0d12e305e1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1499.487710] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5c3c1168-2b6f-4ba6-a4af-8e15cbb5d64a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "bb27c90e-af74-4bd4-9aa4-cc0d12e305e1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1499.489822] env[69328]: INFO nova.compute.manager [None req-5c3c1168-2b6f-4ba6-a4af-8e15cbb5d64a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Terminating instance [ 1499.993444] env[69328]: DEBUG nova.compute.manager [None req-5c3c1168-2b6f-4ba6-a4af-8e15cbb5d64a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Start destroying the instance on the hypervisor. {{(pid=69328) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1499.993769] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5c3c1168-2b6f-4ba6-a4af-8e15cbb5d64a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Destroying instance {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1499.994873] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a31e4d2-8e8a-4c70-81bc-e7f53dce39e0 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.003711] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c3c1168-2b6f-4ba6-a4af-8e15cbb5d64a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Powering off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1500.003969] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1006702d-5961-41d3-be12-b22724523828 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.010698] env[69328]: DEBUG oslo_vmware.api [None req-5c3c1168-2b6f-4ba6-a4af-8e15cbb5d64a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1500.010698] env[69328]: value = "task-3274491" [ 1500.010698] env[69328]: _type = "Task" [ 1500.010698] env[69328]: } to complete. 
{{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1500.019702] env[69328]: DEBUG oslo_vmware.api [None req-5c3c1168-2b6f-4ba6-a4af-8e15cbb5d64a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274491, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.521549] env[69328]: DEBUG oslo_vmware.api [None req-5c3c1168-2b6f-4ba6-a4af-8e15cbb5d64a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274491, 'name': PowerOffVM_Task, 'duration_secs': 0.209954} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.521906] env[69328]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c3c1168-2b6f-4ba6-a4af-8e15cbb5d64a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Powered off the VM {{(pid=69328) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1500.522041] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5c3c1168-2b6f-4ba6-a4af-8e15cbb5d64a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Unregistering the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1500.522233] env[69328]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-edf7418d-5b40-4d27-8b4a-40fb77b2466c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.584543] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5c3c1168-2b6f-4ba6-a4af-8e15cbb5d64a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Unregistered the VM {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1500.584888] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5c3c1168-2b6f-4ba6-a4af-8e15cbb5d64a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Deleting contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1500.584976] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c3c1168-2b6f-4ba6-a4af-8e15cbb5d64a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Deleting the datastore file [datastore1] bb27c90e-af74-4bd4-9aa4-cc0d12e305e1 {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1500.585214] env[69328]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ab0ab345-2828-4527-8a07-0db7adf600d7 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.590701] env[69328]: DEBUG oslo_vmware.api [None req-5c3c1168-2b6f-4ba6-a4af-8e15cbb5d64a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for the task: (returnval){ [ 1500.590701] env[69328]: value = "task-3274493" [ 1500.590701] 
env[69328]: _type = "Task" [ 1500.590701] env[69328]: } to complete. {{(pid=69328) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1500.597833] env[69328]: DEBUG oslo_vmware.api [None req-5c3c1168-2b6f-4ba6-a4af-8e15cbb5d64a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274493, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.100056] env[69328]: DEBUG oslo_vmware.api [None req-5c3c1168-2b6f-4ba6-a4af-8e15cbb5d64a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Task: {'id': task-3274493, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.125074} completed successfully. {{(pid=69328) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.100309] env[69328]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c3c1168-2b6f-4ba6-a4af-8e15cbb5d64a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Deleted the datastore file {{(pid=69328) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1501.100497] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5c3c1168-2b6f-4ba6-a4af-8e15cbb5d64a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Deleted contents of the VM from datastore datastore1 {{(pid=69328) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1501.100665] env[69328]: DEBUG nova.virt.vmwareapi.vmops [None req-5c3c1168-2b6f-4ba6-a4af-8e15cbb5d64a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Instance destroyed {{(pid=69328) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1501.100837] env[69328]: INFO nova.compute.manager [None req-5c3c1168-2b6f-4ba6-a4af-8e15cbb5d64a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1501.101095] env[69328]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5c3c1168-2b6f-4ba6-a4af-8e15cbb5d64a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69328) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1501.101291] env[69328]: DEBUG nova.compute.manager [-] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Deallocating network for instance {{(pid=69328) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1501.101385] env[69328]: DEBUG nova.network.neutron [-] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] deallocate_for_instance() {{(pid=69328) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1501.533296] env[69328]: DEBUG nova.compute.manager [req-e3be7b9b-3850-4228-8fbc-c3ebe764a45e req-61862598-bd18-484e-82c3-886403e5bdee service nova] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Received event network-vif-deleted-91d36c96-504d-42f3-b010-7ec883685b31 {{(pid=69328) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1501.534081] env[69328]: INFO nova.compute.manager [req-e3be7b9b-3850-4228-8fbc-c3ebe764a45e req-61862598-bd18-484e-82c3-886403e5bdee service nova] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Neutron deleted interface 91d36c96-504d-42f3-b010-7ec883685b31; detaching it from the instance and deleting it from the info cache [ 1501.534081] env[69328]: DEBUG nova.network.neutron [req-e3be7b9b-3850-4228-8fbc-c3ebe764a45e req-61862598-bd18-484e-82c3-886403e5bdee service nova] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1501.986702] env[69328]: DEBUG nova.network.neutron [-] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Updating instance_info_cache with network_info: [] {{(pid=69328) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1502.035769] env[69328]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ca6bf3f6-e31d-4825-ad40-5035723a519c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.045473] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02ba1752-fbac-489a-86cb-467d18e9d756 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.068857] env[69328]: DEBUG nova.compute.manager [req-e3be7b9b-3850-4228-8fbc-c3ebe764a45e req-61862598-bd18-484e-82c3-886403e5bdee service nova] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Detach interface failed, port_id=91d36c96-504d-42f3-b010-7ec883685b31, reason: Instance bb27c90e-af74-4bd4-9aa4-cc0d12e305e1 could not be found. {{(pid=69328) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1502.489478] env[69328]: INFO nova.compute.manager [-] [instance: bb27c90e-af74-4bd4-9aa4-cc0d12e305e1] Took 1.39 seconds to deallocate network for instance. 
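The "Waiting for function ..._deallocate_network_with_retries to return" line above is oslo.service's RetryDecorator wrapping the Neutron teardown, so transient failures are retried with an increasing sleep before the "Took 1.39 seconds to deallocate network" summary appears. A hedged sketch of that wrapper; the retry counts, sleeps and exception type are illustrative, not Nova's actual settings:

    # Sketch of a RetryDecorator-wrapped network teardown. The decorator
    # re-invokes the function on the listed exceptions with an increasing
    # sleep, which is the behaviour logged above. Values are illustrative.
    from oslo_service import loopingcall

    class NetworkTeardownFailed(Exception):
        """Placeholder for the transient Neutron errors worth retrying."""

    @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=2,
                                max_sleep_time=10,
                                exceptions=(NetworkTeardownFailed,))
    def deallocate_network_with_retries(network_api, context, instance):
        # deallocate_for_instance() is the call logged just above
        network_api.deallocate_for_instance(context, instance)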
[ 1502.995837] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5c3c1168-2b6f-4ba6-a4af-8e15cbb5d64a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1502.996205] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5c3c1168-2b6f-4ba6-a4af-8e15cbb5d64a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1502.996381] env[69328]: DEBUG nova.objects.instance [None req-5c3c1168-2b6f-4ba6-a4af-8e15cbb5d64a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lazy-loading 'resources' on Instance uuid bb27c90e-af74-4bd4-9aa4-cc0d12e305e1 {{(pid=69328) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1503.517219] env[69328]: DEBUG nova.scheduler.client.report [None req-5c3c1168-2b6f-4ba6-a4af-8e15cbb5d64a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Refreshing inventories for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1503.531332] env[69328]: DEBUG nova.scheduler.client.report [None req-5c3c1168-2b6f-4ba6-a4af-8e15cbb5d64a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Updating ProviderTree inventory for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1503.531572] env[69328]: DEBUG nova.compute.provider_tree [None req-5c3c1168-2b6f-4ba6-a4af-8e15cbb5d64a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Updating inventory in ProviderTree for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1503.541011] env[69328]: DEBUG nova.scheduler.client.report [None req-5c3c1168-2b6f-4ba6-a4af-8e15cbb5d64a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Refreshing aggregate associations for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e, aggregates: None {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1503.557881] env[69328]: 
[ 1503.557881] env[69328]: DEBUG nova.scheduler.client.report [None req-5c3c1168-2b6f-4ba6-a4af-8e15cbb5d64a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Refreshing trait associations for resource provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e, traits: COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=69328) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}}
[ 1503.582552] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41a1ef0a-2fcd-4f95-a6ad-91bd38201037 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1503.589964] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-946d1e22-7c63-4a62-a04f-98a27e1c808c {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1503.619655] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9556d0f-8994-4c75-bfb4-7d6b97ba4a0d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1503.626381] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-829f8c52-50de-442d-a442-6799c7545c11 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1503.640025] env[69328]: DEBUG nova.compute.provider_tree [None req-5c3c1168-2b6f-4ba6-a4af-8e15cbb5d64a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1504.143492] env[69328]: DEBUG nova.scheduler.client.report [None req-5c3c1168-2b6f-4ba6-a4af-8e15cbb5d64a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 1504.648818] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5c3c1168-2b6f-4ba6-a4af-8e15cbb5d64a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.652s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 1504.670981] env[69328]: INFO nova.scheduler.client.report [None req-5c3c1168-2b6f-4ba6-a4af-8e15cbb5d64a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Deleted allocations for instance bb27c90e-af74-4bd4-9aa4-cc0d12e305e1
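
The "Deleted allocations for instance ..." record above corresponds to the report client clearing the instance's consumer allocations in Placement. A hedged sketch of the equivalent REST call, where the Placement endpoint and token are assumptions and the consumer UUID is the instance UUID from the log:

    # Illustrative only: drop every Placement allocation held by a consumer (the deleted instance).
    import requests

    PLACEMENT_URL = "http://controller:8778"      # assumed placement endpoint
    TOKEN = "example-keystone-token"              # assumed auth token
    consumer = "bb27c90e-af74-4bd4-9aa4-cc0d12e305e1"

    resp = requests.delete(
        f"{PLACEMENT_URL}/allocations/{consumer}",
        headers={"X-Auth-Token": TOKEN},
        timeout=10,
    )
    # Placement answers 204 No Content once the allocations are gone.
    print(resp.status_code)
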
[ 1505.178829] env[69328]: DEBUG oslo_concurrency.lockutils [None req-5c3c1168-2b6f-4ba6-a4af-8e15cbb5d64a tempest-ServerActionsTestOtherB-2041880398 tempest-ServerActionsTestOtherB-2041880398-project-member] Lock "bb27c90e-af74-4bd4-9aa4-cc0d12e305e1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.691s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 1512.744818] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1512.745156] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1513.249863] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1513.250250] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1513.250536] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1513.250964] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1513.251156] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1513.251387] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1513.251612] env[69328]: DEBUG nova.compute.manager [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69328) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}}
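
The run of "Running periodic task ..." records, and the skip when CONF.reclaim_instance_interval <= 0, are produced by oslo.service's periodic task machinery. A minimal, self-contained sketch of that pattern follows; the manager class and the demo_interval option are illustrative, not Nova code.

    # Minimal sketch of the oslo.service periodic-task pattern seen above.
    from oslo_config import cfg
    from oslo_service import periodic_task

    CONF = cfg.CONF
    CONF.register_opts([cfg.IntOpt('demo_interval', default=0)])  # stands in for reclaim_instance_interval


    class DemoManager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(CONF)

        @periodic_task.periodic_task(spacing=10, run_immediately=True)
        def _poll_something(self, context):
            print("Running periodic task DemoManager._poll_something")

        @periodic_task.periodic_task(spacing=10, run_immediately=True)
        def _reclaim_queued_deletes(self, context):
            # Mirrors the guard in the log: do nothing when the interval is not positive.
            if CONF.demo_interval <= 0:
                print("CONF.demo_interval <= 0, skipping...")


    mgr = DemoManager()
    mgr.run_periodic_tasks(context=None)  # in a real service a looping call drives this on a timer
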
[ 1513.251910] env[69328]: DEBUG oslo_service.periodic_task [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Running periodic task ComputeManager.update_available_resource {{(pid=69328) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1513.754864] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 1513.755321] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 1513.755366] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 1513.755497] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69328) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1513.756536] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87f1a10e-8ffb-408d-a8ca-13ffd056ed1d {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1513.764659] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9559c99e-d50a-4279-894f-0c9ee7176b7e {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1513.778548] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3774eb23-1aee-437c-b09e-32557fd3abf9 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1513.784723] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1adbe08a-d9da-4034-a53c-75f913bca292 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1513.813915] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180815MB free_disk=116GB free_vcpus=48 pci_devices=None {{(pid=69328) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1513.814079] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
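
The Acquiring / acquired / released triplets in these records come from oslo.concurrency's lock helpers, which log who waited for and held each named lock. A small sketch of the same pattern, with an illustrative function name rather than the actual resource tracker code:

    # Sketch of the oslo.concurrency locking pattern behind the lock records above.
    import time

    from oslo_concurrency import lockutils


    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Runs while holding the in-process "compute_resources" lock, analogous to the
        # resource tracker methods named in the log.
        time.sleep(0.1)


    def clean_cache():
        # The same lock can also be taken explicitly as a context manager.
        with lockutils.lock('compute_resources'):
            pass


    update_usage()
    clean_cache()

With debug logging enabled, each call emits the same "Acquiring lock ... by ...", "acquired ... waited", and "released ... held" lines seen in this log.
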
[ 1513.814315] env[69328]: DEBUG oslo_concurrency.lockutils [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 1514.834802] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=69328) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1514.835069] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=69328) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1514.849395] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d87005b-5a2b-4287-8886-ecfdb0f16bd6 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1514.856394] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bacaa48b-8899-431e-848a-cec0bd8c3afd {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1514.885149] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eed80e0-7a39-4579-a69c-9d4c7e1554e8 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1514.891916] env[69328]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9635982e-4b2d-4baa-81c7-42c283feaa44 {{(pid=69328) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1514.904280] env[69328]: DEBUG nova.compute.provider_tree [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Inventory has not changed in ProviderTree for provider: 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e {{(pid=69328) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1515.407733] env[69328]: DEBUG nova.scheduler.client.report [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Inventory has not changed for provider 149c5b9c-6e66-4569-bb7c-ff2bc0d6ce4e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 116, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69328) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 1515.913850] env[69328]: DEBUG nova.compute.resource_tracker [None req-b77bf389-dd71-4435-9a8f-1df3d076166d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69328) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.100s {{(pid=69328) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}